Dec 02 18:36:08 crc systemd[1]: Starting Kubernetes Kubelet...
Dec 02 18:36:08 crc restorecon[4680]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 02 18:36:08 crc restorecon[4680]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 18:36:08 crc 
restorecon[4680]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 18:36:08 crc 
restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to
system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 
18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 18:36:08 crc 
restorecon[4680]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 
18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:08 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 18:36:09 crc restorecon[4680]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 02 18:36:09 crc restorecon[4680]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 02 18:36:09 crc restorecon[4680]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Dec 02 18:36:09 crc kubenswrapper[4792]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 02 18:36:09 crc kubenswrapper[4792]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Dec 02 18:36:09 crc kubenswrapper[4792]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 02 18:36:09 crc kubenswrapper[4792]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Dec 02 18:36:09 crc kubenswrapper[4792]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Dec 02 18:36:09 crc kubenswrapper[4792]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.375010 4792 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379183 4792 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379206 4792 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379214 4792 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379220 4792 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379233 4792 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379239 4792 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379246 4792 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379253 4792 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379260 4792 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379266 4792 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379271 4792 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379276 4792 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379285 4792 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379292 4792 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379298 4792 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379304 4792 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379310 4792 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379315 4792 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379321 4792 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379326 4792 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379332 4792 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379337 4792 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379344 4792 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379351 4792 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379358 4792 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379365 4792 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379371 4792 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379378 4792 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379384 4792 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379389 4792 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379395 4792 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379401 4792 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379406 4792 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379411 4792 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379416 4792 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379421 4792 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379427 4792 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379432 4792 feature_gate.go:330] unrecognized feature gate: 
MetricsCollectionProfiles Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379437 4792 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379442 4792 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379448 4792 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379453 4792 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379459 4792 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379464 4792 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379469 4792 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379474 4792 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379479 4792 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379484 4792 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379489 4792 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379510 4792 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379516 4792 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379539 4792 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379545 4792 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379550 4792 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379555 4792 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379560 4792 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379565 4792 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379570 4792 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379576 4792 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379581 4792 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379586 4792 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379591 4792 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379596 4792 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379601 4792 feature_gate.go:330] unrecognized feature gate: 
EtcdBackendQuota Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379608 4792 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379614 4792 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379619 4792 feature_gate.go:330] unrecognized feature gate: Example Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379624 4792 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379629 4792 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379634 4792 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.379643 4792 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.379960 4792 flags.go:64] FLAG: --address="0.0.0.0" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.379975 4792 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.379987 4792 flags.go:64] FLAG: --anonymous-auth="true" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.379995 4792 flags.go:64] FLAG: --application-metrics-count-limit="100" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380003 4792 flags.go:64] FLAG: --authentication-token-webhook="false" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380010 4792 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380022 4792 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380030 4792 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380036 4792 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380043 4792 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380049 4792 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380056 4792 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380062 4792 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380068 4792 flags.go:64] FLAG: --cgroup-root="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380073 4792 flags.go:64] FLAG: --cgroups-per-qos="true" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380080 4792 flags.go:64] FLAG: --client-ca-file="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380085 4792 flags.go:64] FLAG: --cloud-config="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380091 4792 flags.go:64] FLAG: --cloud-provider="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380097 4792 flags.go:64] FLAG: --cluster-dns="[]" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380105 4792 flags.go:64] FLAG: --cluster-domain="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380112 4792 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Dec 02 18:36:09 crc 
kubenswrapper[4792]: I1202 18:36:09.380118 4792 flags.go:64] FLAG: --config-dir="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380124 4792 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380131 4792 flags.go:64] FLAG: --container-log-max-files="5" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380139 4792 flags.go:64] FLAG: --container-log-max-size="10Mi" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380146 4792 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380152 4792 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380159 4792 flags.go:64] FLAG: --containerd-namespace="k8s.io" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380165 4792 flags.go:64] FLAG: --contention-profiling="false" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380171 4792 flags.go:64] FLAG: --cpu-cfs-quota="true" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380177 4792 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380183 4792 flags.go:64] FLAG: --cpu-manager-policy="none" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380190 4792 flags.go:64] FLAG: --cpu-manager-policy-options="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380197 4792 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380204 4792 flags.go:64] FLAG: --enable-controller-attach-detach="true" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380210 4792 flags.go:64] FLAG: --enable-debugging-handlers="true" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380216 4792 flags.go:64] FLAG: --enable-load-reader="false" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380222 4792 flags.go:64] FLAG: --enable-server="true" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380228 4792 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380237 4792 flags.go:64] FLAG: --event-burst="100" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380243 4792 flags.go:64] FLAG: --event-qps="50" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380250 4792 flags.go:64] FLAG: --event-storage-age-limit="default=0" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380257 4792 flags.go:64] FLAG: --event-storage-event-limit="default=0" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380263 4792 flags.go:64] FLAG: --eviction-hard="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380270 4792 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380276 4792 flags.go:64] FLAG: --eviction-minimum-reclaim="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380283 4792 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380289 4792 flags.go:64] FLAG: --eviction-soft="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380294 4792 flags.go:64] FLAG: --eviction-soft-grace-period="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380301 4792 flags.go:64] FLAG: --exit-on-lock-contention="false" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380307 4792 flags.go:64] FLAG: 
--experimental-allocatable-ignore-eviction="false" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380314 4792 flags.go:64] FLAG: --experimental-mounter-path="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380320 4792 flags.go:64] FLAG: --fail-cgroupv1="false" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380326 4792 flags.go:64] FLAG: --fail-swap-on="true" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380332 4792 flags.go:64] FLAG: --feature-gates="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380339 4792 flags.go:64] FLAG: --file-check-frequency="20s" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380347 4792 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380353 4792 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380359 4792 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380365 4792 flags.go:64] FLAG: --healthz-port="10248" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380372 4792 flags.go:64] FLAG: --help="false" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380378 4792 flags.go:64] FLAG: --hostname-override="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380384 4792 flags.go:64] FLAG: --housekeeping-interval="10s" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380391 4792 flags.go:64] FLAG: --http-check-frequency="20s" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380397 4792 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380403 4792 flags.go:64] FLAG: --image-credential-provider-config="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380409 4792 flags.go:64] FLAG: --image-gc-high-threshold="85" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380415 4792 flags.go:64] FLAG: --image-gc-low-threshold="80" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380421 4792 flags.go:64] FLAG: --image-service-endpoint="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380426 4792 flags.go:64] FLAG: --kernel-memcg-notification="false" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380432 4792 flags.go:64] FLAG: --kube-api-burst="100" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380438 4792 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380445 4792 flags.go:64] FLAG: --kube-api-qps="50" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380450 4792 flags.go:64] FLAG: --kube-reserved="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380456 4792 flags.go:64] FLAG: --kube-reserved-cgroup="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380462 4792 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380468 4792 flags.go:64] FLAG: --kubelet-cgroups="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380475 4792 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380481 4792 flags.go:64] FLAG: --lock-file="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380487 4792 flags.go:64] FLAG: --log-cadvisor-usage="false" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380493 4792 flags.go:64] FLAG: --log-flush-frequency="5s" Dec 02 
18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380499 4792 flags.go:64] FLAG: --log-json-info-buffer-size="0" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380508 4792 flags.go:64] FLAG: --log-json-split-stream="false" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380515 4792 flags.go:64] FLAG: --log-text-info-buffer-size="0" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380542 4792 flags.go:64] FLAG: --log-text-split-stream="false" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380549 4792 flags.go:64] FLAG: --logging-format="text" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380554 4792 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380561 4792 flags.go:64] FLAG: --make-iptables-util-chains="true" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380567 4792 flags.go:64] FLAG: --manifest-url="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380573 4792 flags.go:64] FLAG: --manifest-url-header="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380581 4792 flags.go:64] FLAG: --max-housekeeping-interval="15s" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380587 4792 flags.go:64] FLAG: --max-open-files="1000000" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380594 4792 flags.go:64] FLAG: --max-pods="110" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380601 4792 flags.go:64] FLAG: --maximum-dead-containers="-1" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380607 4792 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380796 4792 flags.go:64] FLAG: --memory-manager-policy="None" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380801 4792 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380807 4792 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380813 4792 flags.go:64] FLAG: --node-ip="192.168.126.11" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380820 4792 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380833 4792 flags.go:64] FLAG: --node-status-max-images="50" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380839 4792 flags.go:64] FLAG: --node-status-update-frequency="10s" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380846 4792 flags.go:64] FLAG: --oom-score-adj="-999" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380852 4792 flags.go:64] FLAG: --pod-cidr="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380859 4792 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380868 4792 flags.go:64] FLAG: --pod-manifest-path="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380875 4792 flags.go:64] FLAG: --pod-max-pids="-1" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380881 4792 flags.go:64] FLAG: --pods-per-core="0" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380887 4792 flags.go:64] FLAG: --port="10250" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380893 4792 flags.go:64] FLAG: 
--protect-kernel-defaults="false" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380899 4792 flags.go:64] FLAG: --provider-id="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380905 4792 flags.go:64] FLAG: --qos-reserved="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380910 4792 flags.go:64] FLAG: --read-only-port="10255" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380922 4792 flags.go:64] FLAG: --register-node="true" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380928 4792 flags.go:64] FLAG: --register-schedulable="true" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380934 4792 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380945 4792 flags.go:64] FLAG: --registry-burst="10" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380951 4792 flags.go:64] FLAG: --registry-qps="5" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380957 4792 flags.go:64] FLAG: --reserved-cpus="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380963 4792 flags.go:64] FLAG: --reserved-memory="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380971 4792 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380977 4792 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380983 4792 flags.go:64] FLAG: --rotate-certificates="false" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380989 4792 flags.go:64] FLAG: --rotate-server-certificates="false" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.380995 4792 flags.go:64] FLAG: --runonce="false" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.381001 4792 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.381007 4792 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.381013 4792 flags.go:64] FLAG: --seccomp-default="false" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.381019 4792 flags.go:64] FLAG: --serialize-image-pulls="true" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.381025 4792 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.381032 4792 flags.go:64] FLAG: --storage-driver-db="cadvisor" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.381038 4792 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.381044 4792 flags.go:64] FLAG: --storage-driver-password="root" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.381050 4792 flags.go:64] FLAG: --storage-driver-secure="false" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.381057 4792 flags.go:64] FLAG: --storage-driver-table="stats" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.381063 4792 flags.go:64] FLAG: --storage-driver-user="root" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.381068 4792 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.381075 4792 flags.go:64] FLAG: --sync-frequency="1m0s" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.381081 4792 flags.go:64] FLAG: --system-cgroups="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.381087 4792 flags.go:64] FLAG: 
--system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.381096 4792 flags.go:64] FLAG: --system-reserved-cgroup="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.381102 4792 flags.go:64] FLAG: --tls-cert-file="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.381108 4792 flags.go:64] FLAG: --tls-cipher-suites="[]" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.381115 4792 flags.go:64] FLAG: --tls-min-version="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.381120 4792 flags.go:64] FLAG: --tls-private-key-file="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.381129 4792 flags.go:64] FLAG: --topology-manager-policy="none" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.381134 4792 flags.go:64] FLAG: --topology-manager-policy-options="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.381140 4792 flags.go:64] FLAG: --topology-manager-scope="container" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.381148 4792 flags.go:64] FLAG: --v="2" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.381157 4792 flags.go:64] FLAG: --version="false" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.381165 4792 flags.go:64] FLAG: --vmodule="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.381172 4792 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.381179 4792 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381333 4792 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381339 4792 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381347 4792 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381354 4792 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381361 4792 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381367 4792 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381372 4792 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381378 4792 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381384 4792 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381390 4792 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381396 4792 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381403 4792 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381410 4792 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381417 4792 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381423 4792 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381428 4792 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381433 4792 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381438 4792 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381444 4792 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381449 4792 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381454 4792 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381459 4792 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381464 4792 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381470 4792 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381478 4792 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381483 4792 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381488 4792 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381493 4792 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381498 4792 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381503 4792 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381508 4792 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381514 4792 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381540 4792 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381546 4792 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381551 4792 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381556 4792 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381562 4792 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381567 4792 feature_gate.go:330] unrecognized feature gate: Example Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381572 4792 feature_gate.go:330] unrecognized feature gate: 
BootcNodeManagement Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381577 4792 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381582 4792 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381587 4792 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381594 4792 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381601 4792 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381607 4792 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381613 4792 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381618 4792 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381624 4792 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381630 4792 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381636 4792 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381642 4792 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381648 4792 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381653 4792 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381658 4792 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381663 4792 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381669 4792 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381677 4792 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381682 4792 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381687 4792 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381692 4792 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381698 4792 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381703 4792 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381708 4792 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381713 4792 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 02 
18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381718 4792 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381724 4792 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381729 4792 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381734 4792 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381739 4792 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381745 4792 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.381750 4792 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.381758 4792 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.392931 4792 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.392986 4792 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393157 4792 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393169 4792 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393175 4792 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393181 4792 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393186 4792 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393191 4792 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393195 4792 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393200 4792 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393205 4792 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393212 4792 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
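The feature_gate.go:386 line above prints the effective gate set as a Go map literal ("feature gates: {map[Name:true ...]}"), and the same 15-entry map is dumped again after each later pass over the gates. A sketch for parsing the last such line — the regex and helper are mine, assuming this exact literal shape:

    import re

    # feature_gate.go:386 prints the effective set as a Go map literal:
    #   feature gates: {map[Name1:true Name2:false ...]}
    GATES_RE = re.compile(r'feature gates: \{map\[(.*?)\]\}')

    def effective_gates(log_text: str) -> dict:
        """Parse the last 'feature gates:' line into {gate: enabled}."""
        matches = GATES_RE.findall(log_text)
        if not matches:
            return {}
        pairs = (item.split(":", 1) for item in matches[-1].split())
        return {name: value == "true" for name, value in pairs}

    # In this log every dump yields the same map, e.g. KMSv1 -> True,
    # ValidatingAdmissionPolicy -> True, NodeSwap -> False.
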
Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393222 4792 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393229 4792 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393234 4792 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393240 4792 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393245 4792 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393251 4792 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393256 4792 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393261 4792 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393266 4792 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393271 4792 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393277 4792 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393282 4792 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393287 4792 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393291 4792 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393297 4792 feature_gate.go:330] unrecognized feature gate: Example Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393302 4792 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393307 4792 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393312 4792 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393317 4792 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393322 4792 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393329 4792 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393336 4792 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393342 4792 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393347 4792 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393363 4792 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393369 4792 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393375 4792 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393381 4792 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393386 4792 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393391 4792 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393396 4792 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393402 4792 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393407 4792 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393412 4792 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393416 4792 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393421 4792 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393426 4792 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393431 4792 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393436 4792 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393441 4792 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393446 4792 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393451 4792 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393456 4792 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393461 4792 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393467 4792 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393472 4792 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393478 4792 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
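The same list of roughly seventy "unrecognized feature gate" names repeats once per pass the kubelet makes over the gate set (the 18:36:09.379*, .381*, and .393* runs above, and once more below): these appear to be OpenShift cluster-level gates that the upstream kubelet's feature-gate registry does not define, so feature_gate.go warns about and ignores each one. A quick way to confirm the runs are duplicates of one warning list rather than different gate sets (sketch, mine):

    import re
    from collections import Counter

    # \s+ tolerates gate names that wrap onto the next line in this capture.
    UNRECOGNIZED_RE = re.compile(r'unrecognized feature gate:\s+(\w+)')

    def unrecognized_gate_counts(log_text: str) -> Counter:
        """Count warnings per unknown gate name.

        Equal counts across all names mean each pass over the gate set
        warned about the same list, i.e. the runs are duplicates rather
        than different gate sets.
        """
        return Counter(UNRECOGNIZED_RE.findall(log_text))
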
Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393485 4792 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393491 4792 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393496 4792 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393501 4792 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393505 4792 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393512 4792 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393518 4792 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393544 4792 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393549 4792 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393554 4792 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393559 4792 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393564 4792 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393568 4792 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393583 4792 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.393593 4792 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393815 4792 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393859 4792 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393865 4792 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393871 4792 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393875 4792 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393880 4792 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393885 4792 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393891 4792 
feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393895 4792 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393910 4792 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393915 4792 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393919 4792 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393924 4792 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393929 4792 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393936 4792 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393942 4792 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393949 4792 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393955 4792 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393961 4792 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393967 4792 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393975 4792 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393982 4792 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393988 4792 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393993 4792 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.393999 4792 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394004 4792 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394010 4792 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394016 4792 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394022 4792 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394027 4792 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394032 4792 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394038 4792 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394043 4792 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394048 4792 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394062 4792 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394067 4792 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394072 4792 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394077 4792 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394082 4792 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394086 4792 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394092 4792 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394097 4792 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394101 4792 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394106 4792 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394111 4792 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394116 4792 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394121 4792 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394126 4792 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394131 4792 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394135 4792 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394141 4792 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394147 4792 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394155 4792 
feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394163 4792 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394170 4792 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394177 4792 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394185 4792 feature_gate.go:330] unrecognized feature gate: Example Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394192 4792 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394200 4792 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394206 4792 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394213 4792 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394220 4792 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394225 4792 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394231 4792 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394237 4792 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394242 4792 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394247 4792 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394251 4792 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394256 4792 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394261 4792 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.394276 4792 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.394285 4792 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.394842 4792 server.go:940] "Client rotation is on, will bootstrap in background" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.404384 4792 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.404625 4792 certificate_store.go:130] Loading cert/key pair from 
"/var/lib/kubelet/pki/kubelet-client-current.pem". Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.406620 4792 server.go:997] "Starting client certificate rotation" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.406679 4792 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.407103 4792 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-18 05:28:11.783683991 +0000 UTC Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.407246 4792 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.415066 4792 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 02 18:36:09 crc kubenswrapper[4792]: E1202 18:36:09.417340 4792 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.188:6443: connect: connection refused" logger="UnhandledError" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.418540 4792 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.427586 4792 log.go:25] "Validated CRI v1 runtime API" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.443057 4792 log.go:25] "Validated CRI v1 image API" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.445180 4792 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.447844 4792 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-02-18-31-54-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.447885 4792 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:41 fsType:tmpfs blockSize:0}] Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.468829 4792 manager.go:217] Machine: {Timestamp:2025-12-02 18:36:09.465628758 +0000 UTC m=+0.238521106 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654120448 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:4d48cf4e-c99d-43e6-acd7-ad269c0425b2 BootID:1ed8f756-400f-4462-b5a1-c3a97e79306e Filesystems:[{Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} 
{Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827060224 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:41 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:72:a2:d0 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:72:a2:d0 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:4a:ef:27 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:66:ca:8c Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:64:a3:b0 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:8b:46:93 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:72:1c:9b:5a:67:1c Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:fe:bc:d5:4a:c2:07 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654120448 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] 
SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.469327 4792 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.469764 4792 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.470473 4792 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.470824 4792 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.470889 4792 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.471265 4792 topology_manager.go:138] "Creating topology manager with none policy" Dec 02 18:36:09 crc 
kubenswrapper[4792]: I1202 18:36:09.471284 4792 container_manager_linux.go:303] "Creating device plugin manager" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.471728 4792 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.471784 4792 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.472276 4792 state_mem.go:36] "Initialized new in-memory state store" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.472423 4792 server.go:1245] "Using root directory" path="/var/lib/kubelet" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.473416 4792 kubelet.go:418] "Attempting to sync node with API server" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.473450 4792 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.473494 4792 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.473541 4792 kubelet.go:324] "Adding apiserver pod source" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.473571 4792 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.475287 4792 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.188:6443: connect: connection refused Dec 02 18:36:09 crc kubenswrapper[4792]: E1202 18:36:09.475393 4792 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.188:6443: connect: connection refused" logger="UnhandledError" Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.475588 4792 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.188:6443: connect: connection refused Dec 02 18:36:09 crc kubenswrapper[4792]: E1202 18:36:09.475707 4792 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.188:6443: connect: connection refused" logger="UnhandledError" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.476008 4792 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.476452 4792 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". 
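Note: every list/watch against https://api-int.crc.testing:6443 above fails with "connection refused" at this point in startup. On a single-node CRC cluster the kube-apiserver itself runs as a static pod out of /etc/kubernetes/manifests, so the kubelet has to come up first and start that pod before these calls can succeed; client-go keeps retrying in the background. The following minimal Go sketch is one way to triage a saved copy of this journal: it tallies "connection refused" entries by the source file that logged them, so you can confirm the errors share one root cause and stop once the apiserver is listening. The input path kubelet.log and the parsing heuristic are assumptions for illustration, not anything the kubelet provides.

// count_refused.go - tally "connection refused" kubelet journal entries
// by the source file that logged them. Assumes the journal was saved to
// kubelet.log; the path and the parsing heuristic are illustrative only.
package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
	"strings"
)

func main() {
	f, err := os.Open("kubelet.log") // assumed capture of the journal above
	if err != nil {
		panic(err)
	}
	defer f.Close()

	// Matches the "file.go:line]" component klog prints after the PID.
	src := regexp.MustCompile(`([A-Za-z_]+\.go:\d+)\]`)
	counts := map[string]int{}

	sc := bufio.NewScanner(f)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // journal entries can be long
	for sc.Scan() {
		line := sc.Text()
		if !strings.Contains(line, "connection refused") {
			continue
		}
		if m := src.FindStringSubmatch(line); m != nil {
			counts[m[1]]++
		}
	}
	for file, n := range counts {
		fmt.Printf("%-30s %d\n", file, n)
	}
}

On this boot, certificate_manager.go, reflector.go, csi_plugin.go, controller.go and event.go all report the identical dial error against 38.102.83.188:6443, which points at a single cause (no listener yet on port 6443) rather than several independent failures.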
Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.477391 4792 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.477946 4792 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.477966 4792 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.477974 4792 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.477982 4792 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.477993 4792 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.478000 4792 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.478008 4792 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.478018 4792 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.478028 4792 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.478053 4792 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.478064 4792 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.478072 4792 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.478296 4792 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.478798 4792 server.go:1280] "Started kubelet"
Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.479693 4792 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.480022 4792 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.188:6443: connect: connection refused
Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.480420 4792 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Dec 02 18:36:09 crc systemd[1]: Started Kubernetes Kubelet.
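Note: above, the thirteen in-tree volume plugins register, followed by kubernetes.io/csi, and the ratelimit.go:55 entry gives the podresources gRPC endpoint a token bucket of qps=100 with 10 burst tokens. The following minimal Go sketch reproduces those token-bucket semantics with golang.org/x/time/rate, purely to make the two logged numbers concrete; it assumes the kubelet's limiter behaves like a standard token bucket and is not the kubelet's own code.

// ratelimit_sketch.go - token-bucket semantics matching the logged
// qps=100 burstTokens=10 for the podresources endpoint. Illustrative
// only; not the kubelet implementation.
package main

import (
	"fmt"

	"golang.org/x/time/rate"
)

func main() {
	// Refill at 100 tokens per second, hold at most 10 tokens at once.
	lim := rate.NewLimiter(rate.Limit(100), 10)

	allowed := 0
	for i := 0; i < 25; i++ { // 25 back-to-back calls with no delay
		if lim.Allow() {
			allowed++
		}
	}
	// Expect roughly 10 to pass immediately (the burst); the remainder
	// would have to wait for tokens refilling at 100/s.
	fmt.Printf("allowed %d of 25 instantaneous requests\n", allowed)
}

The burst answers how many back-to-back calls the unix:/var/lib/kubelet/pod-resources/kubelet.sock endpoint absorbs instantly; the qps answers the sustained rate once the burst is spent. Whether the kubelet throttles or rejects over-limit calls is not visible in this log.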
Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.480427 4792 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Dec 02 18:36:09 crc kubenswrapper[4792]: E1202 18:36:09.481970 4792 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.188:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187d79d172b46d42 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-02 18:36:09.478655298 +0000 UTC m=+0.251547626,LastTimestamp:2025-12-02 18:36:09.478655298 +0000 UTC m=+0.251547626,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.483053 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.483138 4792 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.483724 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-17 16:50:07.28003748 +0000 UTC Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.484237 4792 volume_manager.go:287] "The desired_state_of_world populator starts" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.484280 4792 volume_manager.go:289] "Starting Kubelet Volume Manager" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.484379 4792 server.go:460] "Adding debug handlers to kubelet server" Dec 02 18:36:09 crc kubenswrapper[4792]: E1202 18:36:09.484576 4792 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.484631 4792 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Dec 02 18:36:09 crc kubenswrapper[4792]: E1202 18:36:09.485189 4792 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.188:6443: connect: connection refused" interval="200ms" Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.485225 4792 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.188:6443: connect: connection refused Dec 02 18:36:09 crc kubenswrapper[4792]: E1202 18:36:09.485342 4792 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.188:6443: connect: connection refused" logger="UnhandledError" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.485709 4792 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: 
connect: no such file or directory Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.485754 4792 factory.go:55] Registering systemd factory Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.485773 4792 factory.go:221] Registration of the systemd container factory successfully Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.486157 4792 factory.go:153] Registering CRI-O factory Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.486183 4792 factory.go:221] Registration of the crio container factory successfully Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.486210 4792 factory.go:103] Registering Raw factory Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.486232 4792 manager.go:1196] Started watching for new ooms in manager Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.487031 4792 manager.go:319] Starting recovery of all containers Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.502958 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.503057 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.503095 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.503125 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.503151 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.503243 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.503314 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.503341 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.503373 4792 
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.503435 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.503465 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.503495 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.503601 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.503635 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.503663 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.503691 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.503725 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.503752 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.503781 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.503808 4792 reconstruct.go:130] "Volume is marked as 
uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.503835 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.503864 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.503940 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.503968 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.503997 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.504066 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.504104 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.504135 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.504165 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.504195 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.504223 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" 
pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.504253 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.504286 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.504314 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.504342 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.504368 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.504394 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.504418 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.504450 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.504479 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.504506 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.504574 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" 
volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.504605 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.504641 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.505170 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.505207 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.505238 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.505266 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.505299 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.505328 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.505390 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.505422 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.505460 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" 
volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.505492 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.505553 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.505588 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.505620 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.505652 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.505681 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.505712 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.505738 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.505766 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.505793 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.505818 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.505869 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.505898 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.505926 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.505955 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.505981 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.506009 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.506036 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.506103 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.506130 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.506159 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.506187 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" 
volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.506216 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.506243 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.506276 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.506305 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.506334 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.506363 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.506393 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.506422 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.506451 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.506478 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.506505 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" 
volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.506568 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.506600 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.506627 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.506656 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.506683 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.506710 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.506738 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.506769 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.506799 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.506829 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.506860 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" 
volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.506886 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.506913 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.506941 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.506967 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.506996 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.507022 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.507050 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.507090 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.507120 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.507150 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.507182 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" 
volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.507209 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.507237 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.507268 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.507299 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.507333 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.507363 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.507387 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.507439 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.507469 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.507493 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.507551 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" 
volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.507582 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.507609 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.507634 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.507660 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.507686 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.507712 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.507742 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.507769 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.507794 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.508391 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.508464 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" 
volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.508477 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.508491 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.508503 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.508515 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.508550 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.508567 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.508581 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.508603 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.508616 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.508635 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.508647 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" 
volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.508660 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.508671 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.508683 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.508698 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.508708 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.508719 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.508730 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.508747 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.508760 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.508773 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.508787 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.508800 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.508813 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.508828 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.508841 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.508854 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.508867 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.508880 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.510129 4792 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.510207 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.510235 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.510260 4792 reconstruct.go:130] "Volume is marked as 
uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.510281 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.510304 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.510326 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.510348 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.510422 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.510444 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.510465 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.510490 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.510510 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.510554 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.510587 4792 reconstruct.go:130] "Volume is marked as uncertain and added into 
the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.510614 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.510670 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.510698 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.510721 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.510743 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.510765 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.510786 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.511433 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.511469 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.511506 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.511595 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.511618 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.511634 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.511673 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.511710 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.511732 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.511750 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.511767 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.511808 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.511846 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.511864 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.511901 4792 reconstruct.go:130] "Volume is marked as uncertain and added 
into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.511920 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.511962 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.512005 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.512023 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.512059 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.512078 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.512114 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.512131 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.512150 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.512167 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.512182 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" 
volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.512221 4792 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.512241 4792 reconstruct.go:97] "Volume reconstruction finished" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.512254 4792 reconciler.go:26] "Reconciler: start to sync state" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.520511 4792 manager.go:324] Recovery completed Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.531690 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.533593 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.533626 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.533636 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.534156 4792 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.534167 4792 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.534185 4792 state_mem.go:36] "Initialized new in-memory state store" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.536164 4792 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.538288 4792 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.538360 4792 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.538398 4792 kubelet.go:2335] "Starting kubelet main sync loop" Dec 02 18:36:09 crc kubenswrapper[4792]: E1202 18:36:09.538475 4792 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.539477 4792 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.188:6443: connect: connection refused Dec 02 18:36:09 crc kubenswrapper[4792]: E1202 18:36:09.539616 4792 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.188:6443: connect: connection refused" logger="UnhandledError" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.544991 4792 policy_none.go:49] "None policy: Start" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.545975 4792 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.545998 4792 state_mem.go:35] "Initializing new in-memory state store" Dec 02 18:36:09 crc kubenswrapper[4792]: E1202 18:36:09.585332 4792 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.609400 4792 manager.go:334] "Starting Device Plugin manager" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.609485 4792 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.609502 4792 server.go:79] "Starting device plugin registration server" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.610023 4792 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.610037 4792 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.610313 4792 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.610388 4792 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.610395 4792 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 02 18:36:09 crc kubenswrapper[4792]: E1202 18:36:09.618645 4792 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.639416 4792 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 02 18:36:09 crc kubenswrapper[4792]: 
I1202 18:36:09.639553 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.640862 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.640892 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.640904 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.641059 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.641374 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.641459 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.642111 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.642137 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.642148 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.642390 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.642561 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.642610 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.642846 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.642903 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.642923 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.643746 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.643772 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.644086 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.644108 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.643796 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.644172 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.644304 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.644603 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.644677 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.644963 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.645013 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.645030 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.645242 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.645355 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.645412 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.646744 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.646764 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.646788 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.646794 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.646815 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.646827 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.646844 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.646790 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.646886 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.647037 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.647078 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.648144 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.648191 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.648214 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:09 crc kubenswrapper[4792]: E1202 18:36:09.686084 4792 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.188:6443: connect: connection refused" interval="400ms" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.711113 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.712143 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.712191 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.712250 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.712310 4792 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 02 18:36:09 crc kubenswrapper[4792]: E1202 18:36:09.712891 4792 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.188:6443: connect: connection refused" node="crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.715067 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.715111 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.715149 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.715175 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.715201 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.715226 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.715298 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.715324 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.715352 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.715389 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.715414 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.715440 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.715464 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " 
pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.715487 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.715512 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.816150 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.816230 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.816273 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.816312 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.816344 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.816380 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.816413 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.816445 4792 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.816485 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.816484 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.816515 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.816619 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.816649 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.816645 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.816691 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.816681 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.816706 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.816505 4792 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.816791 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.816725 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.816763 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.816903 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.816906 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.816934 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.816967 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.816971 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.816967 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.817031 4792 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.817093 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.817384 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.913770 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.915578 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.915692 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.915730 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.915786 4792 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 02 18:36:09 crc kubenswrapper[4792]: E1202 18:36:09.917493 4792 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.188:6443: connect: connection refused" node="crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.968977 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: I1202 18:36:09.990842 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 02 18:36:09 crc kubenswrapper[4792]: W1202 18:36:09.995205 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-a1e500b9c6cc4d138442bbd5747babe3985d5599b26647700a9d8d7e05bad271 WatchSource:0}: Error finding container a1e500b9c6cc4d138442bbd5747babe3985d5599b26647700a9d8d7e05bad271: Status 404 returned error can't find the container with id a1e500b9c6cc4d138442bbd5747babe3985d5599b26647700a9d8d7e05bad271 Dec 02 18:36:10 crc kubenswrapper[4792]: I1202 18:36:10.000131 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 18:36:10 crc kubenswrapper[4792]: W1202 18:36:10.014333 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-6c6bbbbecb266f5d6029e6aa608ea8c28daf5466053b3d98b03117d234b93b72 WatchSource:0}: Error finding container 6c6bbbbecb266f5d6029e6aa608ea8c28daf5466053b3d98b03117d234b93b72: Status 404 returned error can't find the container with id 6c6bbbbecb266f5d6029e6aa608ea8c28daf5466053b3d98b03117d234b93b72 Dec 02 18:36:10 crc kubenswrapper[4792]: W1202 18:36:10.023175 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-c2f96cb3f49ab58ce8ee146d48bbb93c081e38254ed54ead14b931081e71f64d WatchSource:0}: Error finding container c2f96cb3f49ab58ce8ee146d48bbb93c081e38254ed54ead14b931081e71f64d: Status 404 returned error can't find the container with id c2f96cb3f49ab58ce8ee146d48bbb93c081e38254ed54ead14b931081e71f64d Dec 02 18:36:10 crc kubenswrapper[4792]: I1202 18:36:10.023361 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 18:36:10 crc kubenswrapper[4792]: I1202 18:36:10.029173 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 18:36:10 crc kubenswrapper[4792]: W1202 18:36:10.045437 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-6457692b20d87265393e821dccd5947a6028aeade6cbb51d4a8d5ebc14946c85 WatchSource:0}: Error finding container 6457692b20d87265393e821dccd5947a6028aeade6cbb51d4a8d5ebc14946c85: Status 404 returned error can't find the container with id 6457692b20d87265393e821dccd5947a6028aeade6cbb51d4a8d5ebc14946c85 Dec 02 18:36:10 crc kubenswrapper[4792]: W1202 18:36:10.062395 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-a1dae2f48102e344fe8e2013c8abebd86c554c07e35aa1c39729b503fe5c05ae WatchSource:0}: Error finding container a1dae2f48102e344fe8e2013c8abebd86c554c07e35aa1c39729b503fe5c05ae: Status 404 returned error can't find the container with id a1dae2f48102e344fe8e2013c8abebd86c554c07e35aa1c39729b503fe5c05ae Dec 02 18:36:10 crc kubenswrapper[4792]: E1202 18:36:10.087273 4792 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.188:6443: connect: connection refused" interval="800ms" Dec 02 18:36:10 crc kubenswrapper[4792]: I1202 18:36:10.318092 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:10 crc kubenswrapper[4792]: I1202 18:36:10.320367 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:10 crc kubenswrapper[4792]: I1202 18:36:10.320414 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:10 crc kubenswrapper[4792]: I1202 18:36:10.320424 4792 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:10 crc kubenswrapper[4792]: I1202 18:36:10.320460 4792 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 02 18:36:10 crc kubenswrapper[4792]: E1202 18:36:10.322720 4792 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.188:6443: connect: connection refused" node="crc" Dec 02 18:36:10 crc kubenswrapper[4792]: W1202 18:36:10.474403 4792 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.188:6443: connect: connection refused Dec 02 18:36:10 crc kubenswrapper[4792]: E1202 18:36:10.474505 4792 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.188:6443: connect: connection refused" logger="UnhandledError" Dec 02 18:36:10 crc kubenswrapper[4792]: I1202 18:36:10.481090 4792 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.188:6443: connect: connection refused Dec 02 18:36:10 crc kubenswrapper[4792]: I1202 18:36:10.483809 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 03:40:07.215044287 +0000 UTC Dec 02 18:36:10 crc kubenswrapper[4792]: I1202 18:36:10.483854 4792 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 201h3m56.731192934s for next certificate rotation Dec 02 18:36:10 crc kubenswrapper[4792]: I1202 18:36:10.545822 4792 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="5fd1f3a58e7ef15402b3c0ce62ee49f54b9973801e4df0e3699c2c2ec5f78342" exitCode=0 Dec 02 18:36:10 crc kubenswrapper[4792]: I1202 18:36:10.545915 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"5fd1f3a58e7ef15402b3c0ce62ee49f54b9973801e4df0e3699c2c2ec5f78342"} Dec 02 18:36:10 crc kubenswrapper[4792]: I1202 18:36:10.546062 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"6c6bbbbecb266f5d6029e6aa608ea8c28daf5466053b3d98b03117d234b93b72"} Dec 02 18:36:10 crc kubenswrapper[4792]: I1202 18:36:10.546205 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:10 crc kubenswrapper[4792]: I1202 18:36:10.547081 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:10 crc kubenswrapper[4792]: I1202 18:36:10.547113 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:10 crc kubenswrapper[4792]: I1202 18:36:10.547129 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:10 crc kubenswrapper[4792]: I1202 18:36:10.547862 
4792 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="387ae2df2ebffc4d68366fe6c589499cd68be2abdc6d24ee942a103f4d884c3b" exitCode=0 Dec 02 18:36:10 crc kubenswrapper[4792]: I1202 18:36:10.547937 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"387ae2df2ebffc4d68366fe6c589499cd68be2abdc6d24ee942a103f4d884c3b"} Dec 02 18:36:10 crc kubenswrapper[4792]: I1202 18:36:10.547969 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"a1e500b9c6cc4d138442bbd5747babe3985d5599b26647700a9d8d7e05bad271"} Dec 02 18:36:10 crc kubenswrapper[4792]: I1202 18:36:10.548059 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:10 crc kubenswrapper[4792]: I1202 18:36:10.549617 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:10 crc kubenswrapper[4792]: I1202 18:36:10.549655 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:10 crc kubenswrapper[4792]: I1202 18:36:10.549668 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:10 crc kubenswrapper[4792]: I1202 18:36:10.554039 4792 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="766ea0daaf38f994a9e8ec7d4e8c872f667ddb4e651aa3955cc08139be85842d" exitCode=0 Dec 02 18:36:10 crc kubenswrapper[4792]: I1202 18:36:10.554142 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"766ea0daaf38f994a9e8ec7d4e8c872f667ddb4e651aa3955cc08139be85842d"} Dec 02 18:36:10 crc kubenswrapper[4792]: I1202 18:36:10.554214 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"a1dae2f48102e344fe8e2013c8abebd86c554c07e35aa1c39729b503fe5c05ae"} Dec 02 18:36:10 crc kubenswrapper[4792]: I1202 18:36:10.554355 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:10 crc kubenswrapper[4792]: I1202 18:36:10.555192 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:10 crc kubenswrapper[4792]: I1202 18:36:10.555231 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:10 crc kubenswrapper[4792]: I1202 18:36:10.555243 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:10 crc kubenswrapper[4792]: I1202 18:36:10.557171 4792 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c" exitCode=0 Dec 02 18:36:10 crc kubenswrapper[4792]: I1202 18:36:10.557275 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c"} Dec 02 18:36:10 crc kubenswrapper[4792]: I1202 18:36:10.557336 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"6457692b20d87265393e821dccd5947a6028aeade6cbb51d4a8d5ebc14946c85"} Dec 02 18:36:10 crc kubenswrapper[4792]: I1202 18:36:10.557480 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:10 crc kubenswrapper[4792]: I1202 18:36:10.558456 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:10 crc kubenswrapper[4792]: I1202 18:36:10.558488 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:10 crc kubenswrapper[4792]: I1202 18:36:10.558500 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:10 crc kubenswrapper[4792]: I1202 18:36:10.559131 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded"} Dec 02 18:36:10 crc kubenswrapper[4792]: I1202 18:36:10.559169 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c2f96cb3f49ab58ce8ee146d48bbb93c081e38254ed54ead14b931081e71f64d"} Dec 02 18:36:10 crc kubenswrapper[4792]: I1202 18:36:10.564547 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:10 crc kubenswrapper[4792]: I1202 18:36:10.565336 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:10 crc kubenswrapper[4792]: I1202 18:36:10.565367 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:10 crc kubenswrapper[4792]: I1202 18:36:10.565381 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:10 crc kubenswrapper[4792]: W1202 18:36:10.586239 4792 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.188:6443: connect: connection refused Dec 02 18:36:10 crc kubenswrapper[4792]: E1202 18:36:10.586340 4792 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.188:6443: connect: connection refused" logger="UnhandledError" Dec 02 18:36:10 crc kubenswrapper[4792]: W1202 18:36:10.662993 4792 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.188:6443: connect: connection refused Dec 02 18:36:10 crc 
kubenswrapper[4792]: E1202 18:36:10.663090 4792 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.188:6443: connect: connection refused" logger="UnhandledError" Dec 02 18:36:10 crc kubenswrapper[4792]: E1202 18:36:10.687989 4792 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.188:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187d79d172b46d42 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-02 18:36:09.478655298 +0000 UTC m=+0.251547626,LastTimestamp:2025-12-02 18:36:09.478655298 +0000 UTC m=+0.251547626,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 02 18:36:10 crc kubenswrapper[4792]: E1202 18:36:10.889045 4792 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.188:6443: connect: connection refused" interval="1.6s" Dec 02 18:36:10 crc kubenswrapper[4792]: W1202 18:36:10.989146 4792 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.188:6443: connect: connection refused Dec 02 18:36:10 crc kubenswrapper[4792]: E1202 18:36:10.989236 4792 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.188:6443: connect: connection refused" logger="UnhandledError" Dec 02 18:36:11 crc kubenswrapper[4792]: I1202 18:36:11.122851 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:11 crc kubenswrapper[4792]: I1202 18:36:11.124696 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:11 crc kubenswrapper[4792]: I1202 18:36:11.124742 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:11 crc kubenswrapper[4792]: I1202 18:36:11.124755 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:11 crc kubenswrapper[4792]: I1202 18:36:11.124786 4792 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 02 18:36:11 crc kubenswrapper[4792]: I1202 18:36:11.568794 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee"} Dec 02 18:36:11 crc kubenswrapper[4792]: I1202 18:36:11.568879 4792 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad"} Dec 02 18:36:11 crc kubenswrapper[4792]: I1202 18:36:11.568910 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f"} Dec 02 18:36:11 crc kubenswrapper[4792]: I1202 18:36:11.568938 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8"} Dec 02 18:36:11 crc kubenswrapper[4792]: I1202 18:36:11.571960 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d"} Dec 02 18:36:11 crc kubenswrapper[4792]: I1202 18:36:11.572016 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866"} Dec 02 18:36:11 crc kubenswrapper[4792]: I1202 18:36:11.572024 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:11 crc kubenswrapper[4792]: I1202 18:36:11.572027 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129"} Dec 02 18:36:11 crc kubenswrapper[4792]: I1202 18:36:11.573229 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:11 crc kubenswrapper[4792]: I1202 18:36:11.573269 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:11 crc kubenswrapper[4792]: I1202 18:36:11.573278 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:11 crc kubenswrapper[4792]: I1202 18:36:11.573793 4792 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="6a0b2870ef699525b38d94f21951da305874af4208a990da2ada7c0332866d36" exitCode=0 Dec 02 18:36:11 crc kubenswrapper[4792]: I1202 18:36:11.573823 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"6a0b2870ef699525b38d94f21951da305874af4208a990da2ada7c0332866d36"} Dec 02 18:36:11 crc kubenswrapper[4792]: I1202 18:36:11.573979 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:11 crc kubenswrapper[4792]: I1202 18:36:11.574756 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:11 crc kubenswrapper[4792]: I1202 18:36:11.574785 4792 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:11 crc kubenswrapper[4792]: I1202 18:36:11.574795 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:11 crc kubenswrapper[4792]: I1202 18:36:11.576150 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"f0dabf551bfcd95fbdaf2d0159d4f9f87164313d9d7646303805a4be08a470b4"} Dec 02 18:36:11 crc kubenswrapper[4792]: I1202 18:36:11.576220 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:11 crc kubenswrapper[4792]: I1202 18:36:11.577066 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:11 crc kubenswrapper[4792]: I1202 18:36:11.577082 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:11 crc kubenswrapper[4792]: I1202 18:36:11.577091 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:11 crc kubenswrapper[4792]: I1202 18:36:11.579118 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"4d8ef45a3f94d51a7c9260ccd53fa15a9790303e7f7de95a1b166384fca992b2"} Dec 02 18:36:11 crc kubenswrapper[4792]: I1202 18:36:11.579148 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"9a81a880a60fc5626604b7c76035a053fccf22cd190fafdda123fe503d891a21"} Dec 02 18:36:11 crc kubenswrapper[4792]: I1202 18:36:11.579161 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"62cbd6fd0a3a47c0baa3bc776b5a2e2ba638de1954d203b7111ec77f3106d32e"} Dec 02 18:36:11 crc kubenswrapper[4792]: I1202 18:36:11.579227 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:11 crc kubenswrapper[4792]: I1202 18:36:11.579918 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:11 crc kubenswrapper[4792]: I1202 18:36:11.579938 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:11 crc kubenswrapper[4792]: I1202 18:36:11.579947 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:11 crc kubenswrapper[4792]: I1202 18:36:11.603306 4792 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 02 18:36:11 crc kubenswrapper[4792]: I1202 18:36:11.623867 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 18:36:12 crc kubenswrapper[4792]: I1202 18:36:12.459716 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 18:36:12 crc kubenswrapper[4792]: I1202 18:36:12.595435 4792 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810"} Dec 02 18:36:12 crc kubenswrapper[4792]: I1202 18:36:12.595550 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:12 crc kubenswrapper[4792]: I1202 18:36:12.596555 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:12 crc kubenswrapper[4792]: I1202 18:36:12.596618 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:12 crc kubenswrapper[4792]: I1202 18:36:12.596627 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:12 crc kubenswrapper[4792]: I1202 18:36:12.598822 4792 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="95d756b6ee6fb88c93512a94280783b3558cb8ff1fcc253200706cb74e08870d" exitCode=0 Dec 02 18:36:12 crc kubenswrapper[4792]: I1202 18:36:12.598988 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"95d756b6ee6fb88c93512a94280783b3558cb8ff1fcc253200706cb74e08870d"} Dec 02 18:36:12 crc kubenswrapper[4792]: I1202 18:36:12.599192 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:12 crc kubenswrapper[4792]: I1202 18:36:12.599336 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:12 crc kubenswrapper[4792]: I1202 18:36:12.599779 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:12 crc kubenswrapper[4792]: I1202 18:36:12.600665 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:12 crc kubenswrapper[4792]: I1202 18:36:12.600692 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:12 crc kubenswrapper[4792]: I1202 18:36:12.600703 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:12 crc kubenswrapper[4792]: I1202 18:36:12.601104 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:12 crc kubenswrapper[4792]: I1202 18:36:12.601157 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:12 crc kubenswrapper[4792]: I1202 18:36:12.601175 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:12 crc kubenswrapper[4792]: I1202 18:36:12.601753 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:12 crc kubenswrapper[4792]: I1202 18:36:12.601784 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:12 crc kubenswrapper[4792]: I1202 18:36:12.601801 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:13 crc kubenswrapper[4792]: I1202 18:36:13.605864 4792 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"21a864cca8df919277e92dddd034772dd25991a778cb435aa9977983f19595d7"} Dec 02 18:36:13 crc kubenswrapper[4792]: I1202 18:36:13.606281 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"d9a0bc12852e3244f38a9854aedc0f74d086ae9dee69dd4d6cd773f743ad7cdf"} Dec 02 18:36:13 crc kubenswrapper[4792]: I1202 18:36:13.606304 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"6e51d1dd753c543a66e8121c8221b153bb20949a62b1a227cc9323a48d94e434"} Dec 02 18:36:13 crc kubenswrapper[4792]: I1202 18:36:13.606316 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"937c26cbc0b38b347cdaf65a9e03413bde8eebda56f33830dd20f41513004111"} Dec 02 18:36:13 crc kubenswrapper[4792]: I1202 18:36:13.606030 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:13 crc kubenswrapper[4792]: I1202 18:36:13.605918 4792 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 02 18:36:13 crc kubenswrapper[4792]: I1202 18:36:13.606386 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:13 crc kubenswrapper[4792]: I1202 18:36:13.607466 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:13 crc kubenswrapper[4792]: I1202 18:36:13.607481 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:13 crc kubenswrapper[4792]: I1202 18:36:13.607502 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:13 crc kubenswrapper[4792]: I1202 18:36:13.607510 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:13 crc kubenswrapper[4792]: I1202 18:36:13.607537 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:13 crc kubenswrapper[4792]: I1202 18:36:13.607546 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:14 crc kubenswrapper[4792]: I1202 18:36:14.274032 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 18:36:14 crc kubenswrapper[4792]: I1202 18:36:14.615044 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"116adfaaadbeb48f2632afac2d1678cf3833bbbeea42356d477d7ae205aa621a"} Dec 02 18:36:14 crc kubenswrapper[4792]: I1202 18:36:14.615086 4792 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 02 18:36:14 crc kubenswrapper[4792]: I1202 18:36:14.615172 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:14 crc kubenswrapper[4792]: I1202 18:36:14.615233 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume 
controller attach/detach" Dec 02 18:36:14 crc kubenswrapper[4792]: I1202 18:36:14.616792 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:14 crc kubenswrapper[4792]: I1202 18:36:14.616842 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:14 crc kubenswrapper[4792]: I1202 18:36:14.616810 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:14 crc kubenswrapper[4792]: I1202 18:36:14.616920 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:14 crc kubenswrapper[4792]: I1202 18:36:14.616861 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:14 crc kubenswrapper[4792]: I1202 18:36:14.616947 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:14 crc kubenswrapper[4792]: I1202 18:36:14.720500 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 18:36:14 crc kubenswrapper[4792]: I1202 18:36:14.720840 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:14 crc kubenswrapper[4792]: I1202 18:36:14.722829 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:14 crc kubenswrapper[4792]: I1202 18:36:14.722917 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:14 crc kubenswrapper[4792]: I1202 18:36:14.722938 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:15 crc kubenswrapper[4792]: I1202 18:36:15.618099 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:15 crc kubenswrapper[4792]: I1202 18:36:15.620181 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:15 crc kubenswrapper[4792]: I1202 18:36:15.620260 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:15 crc kubenswrapper[4792]: I1202 18:36:15.620285 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:16 crc kubenswrapper[4792]: I1202 18:36:16.148965 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 18:36:16 crc kubenswrapper[4792]: I1202 18:36:16.149413 4792 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 02 18:36:16 crc kubenswrapper[4792]: I1202 18:36:16.149633 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:16 crc kubenswrapper[4792]: I1202 18:36:16.151095 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:16 crc kubenswrapper[4792]: I1202 18:36:16.151143 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:16 crc kubenswrapper[4792]: I1202 18:36:16.151155 4792 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:17 crc kubenswrapper[4792]: I1202 18:36:17.251836 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Dec 02 18:36:17 crc kubenswrapper[4792]: I1202 18:36:17.252127 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:17 crc kubenswrapper[4792]: I1202 18:36:17.253831 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:17 crc kubenswrapper[4792]: I1202 18:36:17.253874 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:17 crc kubenswrapper[4792]: I1202 18:36:17.253884 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:17 crc kubenswrapper[4792]: I1202 18:36:17.449207 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 18:36:17 crc kubenswrapper[4792]: I1202 18:36:17.449401 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:17 crc kubenswrapper[4792]: I1202 18:36:17.450578 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:17 crc kubenswrapper[4792]: I1202 18:36:17.450609 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:17 crc kubenswrapper[4792]: I1202 18:36:17.450619 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:17 crc kubenswrapper[4792]: I1202 18:36:17.721185 4792 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 02 18:36:17 crc kubenswrapper[4792]: I1202 18:36:17.721673 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 02 18:36:17 crc kubenswrapper[4792]: I1202 18:36:17.929737 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 18:36:17 crc kubenswrapper[4792]: I1202 18:36:17.930060 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:17 crc kubenswrapper[4792]: I1202 18:36:17.931638 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:17 crc kubenswrapper[4792]: I1202 18:36:17.931693 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:17 crc kubenswrapper[4792]: I1202 18:36:17.931709 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:18 crc kubenswrapper[4792]: I1202 
18:36:18.013041 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 18:36:18 crc kubenswrapper[4792]: I1202 18:36:18.013285 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:18 crc kubenswrapper[4792]: I1202 18:36:18.014749 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:18 crc kubenswrapper[4792]: I1202 18:36:18.014793 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:18 crc kubenswrapper[4792]: I1202 18:36:18.014805 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:18 crc kubenswrapper[4792]: I1202 18:36:18.017822 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 18:36:18 crc kubenswrapper[4792]: I1202 18:36:18.626675 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:18 crc kubenswrapper[4792]: I1202 18:36:18.628014 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:18 crc kubenswrapper[4792]: I1202 18:36:18.628073 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:18 crc kubenswrapper[4792]: I1202 18:36:18.628093 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:19 crc kubenswrapper[4792]: E1202 18:36:19.618789 4792 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 02 18:36:21 crc kubenswrapper[4792]: E1202 18:36:21.126889 4792 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": net/http: TLS handshake timeout" node="crc" Dec 02 18:36:21 crc kubenswrapper[4792]: I1202 18:36:21.481948 4792 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Dec 02 18:36:21 crc kubenswrapper[4792]: E1202 18:36:21.605685 4792 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 02 18:36:22 crc kubenswrapper[4792]: W1202 18:36:22.433748 4792 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 02 18:36:22 crc kubenswrapper[4792]: I1202 18:36:22.433958 4792 trace.go:236] Trace[1966815818]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Dec-2025 18:36:12.432) (total time: 10001ms): Dec 02 18:36:22 crc kubenswrapper[4792]: Trace[1966815818]: ---"Objects listed" error:Get 
"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (18:36:22.433) Dec 02 18:36:22 crc kubenswrapper[4792]: Trace[1966815818]: [10.001412164s] [10.001412164s] END Dec 02 18:36:22 crc kubenswrapper[4792]: E1202 18:36:22.434005 4792 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 02 18:36:22 crc kubenswrapper[4792]: E1202 18:36:22.490858 4792 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" interval="3.2s" Dec 02 18:36:22 crc kubenswrapper[4792]: I1202 18:36:22.727060 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:22 crc kubenswrapper[4792]: I1202 18:36:22.728710 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:22 crc kubenswrapper[4792]: I1202 18:36:22.728902 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:22 crc kubenswrapper[4792]: I1202 18:36:22.729085 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:22 crc kubenswrapper[4792]: I1202 18:36:22.729320 4792 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 02 18:36:22 crc kubenswrapper[4792]: W1202 18:36:22.734666 4792 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 02 18:36:22 crc kubenswrapper[4792]: I1202 18:36:22.734806 4792 trace.go:236] Trace[1599113649]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Dec-2025 18:36:12.732) (total time: 10002ms): Dec 02 18:36:22 crc kubenswrapper[4792]: Trace[1599113649]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10002ms (18:36:22.734) Dec 02 18:36:22 crc kubenswrapper[4792]: Trace[1599113649]: [10.002694778s] [10.002694778s] END Dec 02 18:36:22 crc kubenswrapper[4792]: E1202 18:36:22.734850 4792 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 02 18:36:22 crc kubenswrapper[4792]: I1202 18:36:22.843161 4792 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 02 
18:36:22 crc kubenswrapper[4792]: I1202 18:36:22.843458 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 02 18:36:22 crc kubenswrapper[4792]: I1202 18:36:22.859798 4792 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 02 18:36:22 crc kubenswrapper[4792]: I1202 18:36:22.859890 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 02 18:36:23 crc kubenswrapper[4792]: I1202 18:36:23.815392 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Dec 02 18:36:23 crc kubenswrapper[4792]: I1202 18:36:23.816750 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:23 crc kubenswrapper[4792]: I1202 18:36:23.818840 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:23 crc kubenswrapper[4792]: I1202 18:36:23.818910 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:23 crc kubenswrapper[4792]: I1202 18:36:23.818928 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:23 crc kubenswrapper[4792]: I1202 18:36:23.901222 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 02 18:36:24 crc kubenswrapper[4792]: I1202 18:36:24.281357 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 18:36:24 crc kubenswrapper[4792]: I1202 18:36:24.281640 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:24 crc kubenswrapper[4792]: I1202 18:36:24.283178 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:24 crc kubenswrapper[4792]: I1202 18:36:24.283229 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:24 crc kubenswrapper[4792]: I1202 18:36:24.283243 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:24 crc kubenswrapper[4792]: I1202 18:36:24.288268 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 18:36:24 crc kubenswrapper[4792]: I1202 18:36:24.643374 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:24 crc kubenswrapper[4792]: I1202 18:36:24.643439 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:24 crc kubenswrapper[4792]: I1202 18:36:24.645412 4792 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:24 crc kubenswrapper[4792]: I1202 18:36:24.645473 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:24 crc kubenswrapper[4792]: I1202 18:36:24.645502 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:24 crc kubenswrapper[4792]: I1202 18:36:24.646003 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:24 crc kubenswrapper[4792]: I1202 18:36:24.646054 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:24 crc kubenswrapper[4792]: I1202 18:36:24.646076 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:24 crc kubenswrapper[4792]: I1202 18:36:24.665760 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 02 18:36:25 crc kubenswrapper[4792]: I1202 18:36:25.646143 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:25 crc kubenswrapper[4792]: I1202 18:36:25.647758 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:25 crc kubenswrapper[4792]: I1202 18:36:25.647837 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:25 crc kubenswrapper[4792]: I1202 18:36:25.647858 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:25 crc kubenswrapper[4792]: I1202 18:36:25.938041 4792 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 02 18:36:25 crc kubenswrapper[4792]: I1202 18:36:25.957663 4792 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Dec 02 18:36:26 crc kubenswrapper[4792]: I1202 18:36:26.445300 4792 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 02 18:36:26 crc kubenswrapper[4792]: I1202 18:36:26.483373 4792 apiserver.go:52] "Watching apiserver" Dec 02 18:36:26 crc kubenswrapper[4792]: I1202 18:36:26.489192 4792 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 02 18:36:26 crc kubenswrapper[4792]: I1202 18:36:26.490121 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf"] Dec 02 18:36:26 crc kubenswrapper[4792]: I1202 18:36:26.491030 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:36:26 crc kubenswrapper[4792]: I1202 18:36:26.491088 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:36:26 crc kubenswrapper[4792]: E1202 18:36:26.491138 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:36:26 crc kubenswrapper[4792]: E1202 18:36:26.491262 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:36:26 crc kubenswrapper[4792]: I1202 18:36:26.491651 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 18:36:26 crc kubenswrapper[4792]: I1202 18:36:26.493137 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 18:36:26 crc kubenswrapper[4792]: I1202 18:36:26.494145 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:36:26 crc kubenswrapper[4792]: I1202 18:36:26.494249 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 18:36:26 crc kubenswrapper[4792]: E1202 18:36:26.494273 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:36:26 crc kubenswrapper[4792]: I1202 18:36:26.495172 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 02 18:36:26 crc kubenswrapper[4792]: I1202 18:36:26.495957 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 02 18:36:26 crc kubenswrapper[4792]: I1202 18:36:26.496897 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 02 18:36:26 crc kubenswrapper[4792]: I1202 18:36:26.496947 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 02 18:36:26 crc kubenswrapper[4792]: I1202 18:36:26.497406 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 02 18:36:26 crc kubenswrapper[4792]: I1202 18:36:26.497452 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 02 18:36:26 crc kubenswrapper[4792]: I1202 18:36:26.497819 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 02 18:36:26 crc kubenswrapper[4792]: I1202 18:36:26.497843 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 02 18:36:26 crc kubenswrapper[4792]: I1202 18:36:26.497820 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 02 18:36:26 crc kubenswrapper[4792]: I1202 18:36:26.548603 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 18:36:26 crc kubenswrapper[4792]: I1202 18:36:26.576005 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 18:36:26 crc kubenswrapper[4792]: I1202 18:36:26.585230 4792 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 02 18:36:26 crc kubenswrapper[4792]: I1202 18:36:26.597686 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 18:36:26 crc kubenswrapper[4792]: I1202 18:36:26.610225 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 18:36:26 crc kubenswrapper[4792]: I1202 18:36:26.622173 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 18:36:26 crc kubenswrapper[4792]: I1202 18:36:26.638649 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 18:36:26 crc kubenswrapper[4792]: I1202 18:36:26.653936 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 18:36:26 crc kubenswrapper[4792]: I1202 18:36:26.660228 4792 csr.go:261] certificate signing request csr-gnwv4 is approved, waiting to be issued Dec 02 18:36:26 crc kubenswrapper[4792]: I1202 18:36:26.664932 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 18:36:26 crc kubenswrapper[4792]: I1202 18:36:26.665997 4792 csr.go:257] certificate signing request csr-gnwv4 is issued Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.148377 4792 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.453587 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.466876 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.470169 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.485876 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.498149 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.532174 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.557183 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.579227 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.666956 4792 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-12-02 18:31:26 +0000 UTC, rotation deadline is 2026-10-24 16:10:42.777129185 +0000 UTC Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.667015 4792 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 7821h34m15.110119023s for next certificate rotation Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.721945 4792 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.722415 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.852767 4792 trace.go:236] Trace[1435143606]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Dec-2025 18:36:13.417) (total time: 14435ms): Dec 02 18:36:27 crc kubenswrapper[4792]: Trace[1435143606]: ---"Objects listed" error: 14435ms (18:36:27.852) Dec 02 18:36:27 crc kubenswrapper[4792]: Trace[1435143606]: [14.435234963s] [14.435234963s] END Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.852805 4792 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.855201 4792 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Dec 02 18:36:27 crc kubenswrapper[4792]: E1202 18:36:27.855418 4792 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.855455 4792 trace.go:236] Trace[1335132867]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Dec-2025 18:36:13.622) (total time: 14233ms): Dec 02 18:36:27 crc kubenswrapper[4792]: Trace[1335132867]: ---"Objects listed" error: 14232ms (18:36:27.855) Dec 02 18:36:27 crc kubenswrapper[4792]: Trace[1335132867]: 
[14.233116117s] [14.233116117s] END Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.855489 4792 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.899863 4792 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:38378->192.168.126.11:17697: read: connection reset by peer" start-of-body= Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.899940 4792 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:38390->192.168.126.11:17697: read: connection reset by peer" start-of-body= Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.899939 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:38378->192.168.126.11:17697: read: connection reset by peer" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.900044 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:38390->192.168.126.11:17697: read: connection reset by peer" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.900407 4792 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.900443 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.930210 4792 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.930291 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.955854 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.955922 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.955959 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.956028 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.956063 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.956096 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.956157 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.956188 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.956186 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.956224 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.956255 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.956288 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.956325 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.956356 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.956372 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.956389 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.956379 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.956428 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.956461 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.956492 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.956507 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.956550 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.956587 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.956619 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.956649 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.956682 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.956712 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.956745 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.956776 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.956806 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.956836 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.956866 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.956896 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.956933 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.956964 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.956998 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.957032 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: 
\"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.957107 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.957140 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.957197 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.957228 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.957268 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.957305 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.957342 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.957376 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.957408 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.957439 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.957557 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.957595 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.957627 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.957658 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.957717 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.957747 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.957778 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.957813 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.957845 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.957879 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" 
(UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.957910 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.957943 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.957982 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.958014 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.958044 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.958078 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.958114 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.958146 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.958183 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.958214 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.958250 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.958281 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.958314 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.958346 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.958381 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.958434 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.958481 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.958515 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.958581 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 02 18:36:27 crc 
kubenswrapper[4792]: I1202 18:36:27.958620 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.958651 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.958682 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.958719 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.958750 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.958783 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.958818 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.958915 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.958948 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.958984 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod 
\"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.959018 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.959053 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.959130 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.959188 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.959225 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.959304 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.959373 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.959410 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.959444 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.959514 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: 
\"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.959649 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.959687 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.959789 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.959827 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.959897 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.959933 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.960101 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.960140 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.960175 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.960214 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.960249 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.960286 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.960321 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.960356 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.960391 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.960427 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.960514 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.960615 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.960698 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.960736 4792 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.960771 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.960850 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.960923 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.960993 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.961030 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.961061 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.961095 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.961133 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.961167 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 02 18:36:27 crc 
kubenswrapper[4792]: I1202 18:36:27.961200 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.961235 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.961269 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.961300 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.961379 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.961417 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.961451 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.961487 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.961837 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.961878 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: 
\"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.961917 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.961952 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.961987 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.962026 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.962064 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.962098 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.962134 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.962170 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.962208 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.962252 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.962286 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.962321 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.962358 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.962393 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.962429 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.962463 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.962501 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.962918 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.962971 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.963008 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" 
(UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.963052 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.963088 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.963124 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.963161 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.963199 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.963235 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.963270 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.963308 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.963343 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.963377 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: 
\"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.963429 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.963467 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.963502 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.963573 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.963614 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.963654 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.963694 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.963730 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.963769 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.963803 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" 
(UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.963837 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.963874 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.964025 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.964173 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.956713 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.956735 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.956983 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.957221 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.957488 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.957514 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.957788 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.958130 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.958268 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.958515 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.958677 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.958764 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.958865 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.959007 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.959465 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.959681 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.965749 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.959936 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.960089 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.960140 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). 
InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.960304 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.960564 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.960957 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.961080 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.962489 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.962730 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.962952 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.963133 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). 
InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.963388 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.963571 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.963484 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.963739 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.963866 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.964271 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.964398 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.964480 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.964581 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.964693 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.964779 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.964859 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.964995 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.965039 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.965102 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.965151 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.965198 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.965303 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.965318 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.965365 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.965365 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.965447 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). 
InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.965480 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.965478 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.965488 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.965620 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.965686 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.967140 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.967325 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.967854 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.968412 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.968693 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.968707 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.969054 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.969254 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.969356 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.969370 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.969249 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.971263 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.972204 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.972402 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.972711 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.972962 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.973111 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.973354 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.973584 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.973645 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.973705 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.973709 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.973867 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.974114 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.974550 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.975033 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.975305 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: E1202 18:36:27.975756 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:36:28.475728302 +0000 UTC m=+19.248620800 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.975753 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.976104 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.976148 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.976177 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.976201 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.976229 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.976255 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.976280 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.976304 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.976326 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.976349 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.976373 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.976406 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.976436 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.976463 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.976493 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.976485 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.976543 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.976569 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.976593 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.976648 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.976682 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.976711 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.976740 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.976765 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.976792 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: 
\"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.976817 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.976842 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.976867 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.976959 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.976984 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977012 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977039 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977041 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977068 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977060 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977183 4792 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977200 4792 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977216 4792 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977230 4792 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977244 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977258 4792 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977273 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977275 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977289 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977305 4792 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977319 4792 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977334 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977348 4792 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977362 4792 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977378 4792 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977391 4792 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977406 4792 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977419 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977434 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977450 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977464 4792 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 
crc kubenswrapper[4792]: I1202 18:36:27.977478 4792 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977492 4792 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977508 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977543 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977557 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977571 4792 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977584 4792 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977598 4792 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977613 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977627 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977660 4792 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977673 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977699 4792 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977713 4792 
reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977727 4792 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977740 4792 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977755 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977769 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977817 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977832 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977846 4792 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977861 4792 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977884 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977898 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977910 4792 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977925 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977938 4792 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" 
(UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977952 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977966 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977981 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977997 4792 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.978015 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.978029 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.978043 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.978058 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.978072 4792 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.978085 4792 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.978100 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.978114 4792 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.978130 4792 reconciler_common.go:293] "Volume detached for 
volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.978145 4792 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.978160 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.978173 4792 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.978186 4792 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.978200 4792 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.978217 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.978231 4792 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.978245 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.978261 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.978275 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.978288 4792 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.978302 4792 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc 
kubenswrapper[4792]: I1202 18:36:27.978316 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.978330 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.978344 4792 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.978364 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.978378 4792 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.978392 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.978406 4792 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.978419 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.978433 4792 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.978449 4792 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.978462 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.978477 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.978492 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: 
\"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.978507 4792 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.978536 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977433 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977463 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977639 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977629 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977741 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977904 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.977996 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.978132 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.978402 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.978427 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.978704 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.978742 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.979016 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.979898 4792 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. 
Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.980649 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.984897 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.984992 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.985570 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.986146 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.986169 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.987459 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.987637 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.988028 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.988141 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.988219 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.988240 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: E1202 18:36:27.988663 4792 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.988706 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.989030 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.987405 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.990780 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.991120 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: E1202 18:36:27.991991 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 18:36:27 crc kubenswrapper[4792]: E1202 18:36:27.992023 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 18:36:27 crc kubenswrapper[4792]: E1202 18:36:27.992039 4792 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 18:36:27 crc kubenswrapper[4792]: E1202 18:36:27.992088 4792 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.992616 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.993313 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.993391 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.994335 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.994813 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.995075 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.995354 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.995483 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.999078 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:27 crc kubenswrapper[4792]: I1202 18:36:27.999850 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.005540 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: E1202 18:36:28.009716 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 18:36:28.488764776 +0000 UTC m=+19.261657314 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 18:36:28 crc kubenswrapper[4792]: E1202 18:36:28.009811 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 18:36:28.509763111 +0000 UTC m=+19.282655439 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 18:36:28 crc kubenswrapper[4792]: E1202 18:36:28.009832 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-02 18:36:28.509823572 +0000 UTC m=+19.282715900 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.010694 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.010851 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.011439 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.011477 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.011535 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: E1202 18:36:28.011828 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 18:36:28 crc kubenswrapper[4792]: E1202 18:36:28.011855 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 18:36:28 crc kubenswrapper[4792]: E1202 18:36:28.011879 4792 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 18:36:28 crc kubenswrapper[4792]: E1202 18:36:28.011960 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-02 18:36:28.511919438 +0000 UTC m=+19.284811886 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.012710 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.012917 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.013036 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.013135 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.013755 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.014396 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.014479 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.014851 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.014855 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.015030 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.015190 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.015367 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.015402 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.016000 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.016188 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.016281 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.016590 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.017238 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.017285 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.017684 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.017994 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.018307 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.018946 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.019350 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.019473 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.019673 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.020289 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.020320 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.023259 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.023544 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.023766 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.023844 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.023968 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.024105 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.024850 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.025157 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.026020 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.026679 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.027107 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.028310 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.028542 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.028619 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.028992 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.029072 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.029474 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.029543 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.029860 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.030050 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.030128 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.030827 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.030946 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.032950 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.034167 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.034222 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.034306 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.034340 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.035061 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.035068 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.036189 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.036464 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.038145 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.038506 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.039614 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.039683 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.044883 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.050054 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.052234 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.056589 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). 
InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079104 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079168 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079215 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079228 4792 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079239 4792 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079250 4792 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079260 4792 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079268 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079278 4792 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079287 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079296 4792 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079304 4792 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: 
\"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079314 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079323 4792 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079332 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079341 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079350 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079360 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079369 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079378 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079389 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079401 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079413 4792 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079425 4792 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079438 4792 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" 
(UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079451 4792 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079463 4792 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079477 4792 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079514 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079560 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079570 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079580 4792 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079591 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079600 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079628 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079639 4792 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079648 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079657 4792 
reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079667 4792 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079676 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079684 4792 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079693 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079702 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079710 4792 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079718 4792 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079727 4792 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079737 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079747 4792 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079755 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079765 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079775 4792 reconciler_common.go:293] "Volume detached 
for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079784 4792 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079794 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079803 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079812 4792 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079820 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079828 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079837 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079846 4792 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079855 4792 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079863 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079883 4792 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079900 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079924 4792 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: 
\"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079935 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079945 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079955 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079964 4792 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079972 4792 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079981 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.079990 4792 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.080000 4792 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.080010 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.080020 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.080029 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.080039 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.080048 4792 reconciler_common.go:293] "Volume 
detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.080058 4792 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.080067 4792 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.080075 4792 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.080084 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.080092 4792 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.080101 4792 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.080110 4792 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.080119 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.080127 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.080136 4792 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.080145 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.080152 4792 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.080162 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.080171 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.080180 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.080188 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.080197 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.080206 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.080214 4792 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.080224 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.080233 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.080241 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.080249 4792 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.080258 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.080266 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.080274 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: 
\"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.080283 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.080291 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.080299 4792 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.080307 4792 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.080315 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.080324 4792 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.080369 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.080461 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.318046 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 18:36:28 crc kubenswrapper[4792]: W1202 18:36:28.331285 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-ae57e41de57e42d069149466db73f8e0caa4c9a45064ebbbba4057c0a3a3f955 WatchSource:0}: Error finding container ae57e41de57e42d069149466db73f8e0caa4c9a45064ebbbba4057c0a3a3f955: Status 404 returned error can't find the container with id ae57e41de57e42d069149466db73f8e0caa4c9a45064ebbbba4057c0a3a3f955 Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.347774 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 18:36:28 crc kubenswrapper[4792]: W1202 18:36:28.430243 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-0b86a0b50e83e4bc3b4273306cc883f3f999727df593bebc54925cdc6af8cac8 WatchSource:0}: Error finding container 0b86a0b50e83e4bc3b4273306cc883f3f999727df593bebc54925cdc6af8cac8: Status 404 returned error can't find the container with id 0b86a0b50e83e4bc3b4273306cc883f3f999727df593bebc54925cdc6af8cac8 Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.483974 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:36:28 crc kubenswrapper[4792]: E1202 18:36:28.484131 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:36:29.484106024 +0000 UTC m=+20.256998352 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.539063 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:36:28 crc kubenswrapper[4792]: E1202 18:36:28.539186 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.539249 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:36:28 crc kubenswrapper[4792]: E1202 18:36:28.539301 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.539345 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:36:28 crc kubenswrapper[4792]: E1202 18:36:28.539395 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.585490 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.585557 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.585586 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.585611 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:36:28 crc kubenswrapper[4792]: E1202 18:36:28.585730 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 18:36:28 crc kubenswrapper[4792]: E1202 18:36:28.585768 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 18:36:28 crc kubenswrapper[4792]: E1202 18:36:28.585825 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 18:36:28 crc kubenswrapper[4792]: E1202 18:36:28.585846 4792 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 18:36:28 crc kubenswrapper[4792]: E1202 18:36:28.585849 4792 configmap.go:193] Couldn't get configMap 
openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 18:36:28 crc kubenswrapper[4792]: E1202 18:36:28.585898 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 18:36:29.585885011 +0000 UTC m=+20.358777339 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 18:36:28 crc kubenswrapper[4792]: E1202 18:36:28.585914 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-02 18:36:29.585908041 +0000 UTC m=+20.358800369 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 18:36:28 crc kubenswrapper[4792]: E1202 18:36:28.585829 4792 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 18:36:28 crc kubenswrapper[4792]: E1202 18:36:28.585976 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 18:36:29.585950612 +0000 UTC m=+20.358842980 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 18:36:28 crc kubenswrapper[4792]: E1202 18:36:28.585791 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 18:36:28 crc kubenswrapper[4792]: E1202 18:36:28.586004 4792 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 18:36:28 crc kubenswrapper[4792]: E1202 18:36:28.586042 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. 
No retries permitted until 2025-12-02 18:36:29.586030595 +0000 UTC m=+20.358922953 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.590599 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-l7jxh"] Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.590967 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-l7jxh" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.593045 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.593072 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.593843 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.603107 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l7jxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da09a4ee-4e22-4396-a352-7bcb2b89db73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nqc7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l7jxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.614237 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.625707 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.640478 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.654056 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.655395 4792 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810" exitCode=255 Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.655441 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810"} Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.656852 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"0b86a0b50e83e4bc3b4273306cc883f3f999727df593bebc54925cdc6af8cac8"} Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.657643 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"ae57e41de57e42d069149466db73f8e0caa4c9a45064ebbbba4057c0a3a3f955"} Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.659077 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.659428 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"e8537808b344a3fb20a07a25558069d571f132b5a1004781b9efc0ed4d76de64"} Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.659470 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"bb24f25d6ddc35bcd113c137145f60606cbe656c7561eab446a5d8f3b9f5c8e4"} Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.659482 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"57ffd5505d1c5974e80a5f223bbcf62ff7a0339e3bcb9809bd4b90d2dfb78516"} Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.682038 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d1917fa-6107-4248-a5a8-a868f9db2814\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\"
,\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.685935 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/da09a4ee-4e22-4396-a352-7bcb2b89db73-hosts-file\") pod \"node-resolver-l7jxh\" (UID: \"da09a4ee-4e22-4396-a352-7bcb2b89db73\") " pod="openshift-dns/node-resolver-l7jxh" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.685975 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nqc7n\" (UniqueName: \"kubernetes.io/projected/da09a4ee-4e22-4396-a352-7bcb2b89db73-kube-api-access-nqc7n\") pod \"node-resolver-l7jxh\" (UID: \"da09a4ee-4e22-4396-a352-7bcb2b89db73\") " pod="openshift-dns/node-resolver-l7jxh" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.708075 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.723223 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.723719 4792 scope.go:117] "RemoveContainer" containerID="68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.724725 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.745460 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d1917fa-6107-4248-a5a8-a868f9db2814\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\"
,\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.759625 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9668059a-1772-400b-b2ad-86aa3d306dd9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02
T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T18:36:27Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 18:36:22.337074 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 18:36:22.338216 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179137523/tls.crt::/tmp/serving-cert-2179137523/tls.key\\\\\\\"\\\\nI1202 18:36:27.868924 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 18:36:27.874780 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 18:36:27.874832 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 18:36:27.874861 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 18:36:27.874872 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 18:36:27.886080 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1202 18:36:27.886174 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 18:36:27.886187 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886221 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 18:36:27.886250 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 18:36:27.886257 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 18:36:27.886264 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 18:36:27.889226 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.783008 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.786842 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nqc7n\" (UniqueName: \"kubernetes.io/projected/da09a4ee-4e22-4396-a352-7bcb2b89db73-kube-api-access-nqc7n\") pod \"node-resolver-l7jxh\" (UID: \"da09a4ee-4e22-4396-a352-7bcb2b89db73\") " pod="openshift-dns/node-resolver-l7jxh" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.786922 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/da09a4ee-4e22-4396-a352-7bcb2b89db73-hosts-file\") pod \"node-resolver-l7jxh\" (UID: \"da09a4ee-4e22-4396-a352-7bcb2b89db73\") " pod="openshift-dns/node-resolver-l7jxh" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.787077 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/da09a4ee-4e22-4396-a352-7bcb2b89db73-hosts-file\") pod \"node-resolver-l7jxh\" (UID: \"da09a4ee-4e22-4396-a352-7bcb2b89db73\") " pod="openshift-dns/node-resolver-l7jxh" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.807872 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.814212 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nqc7n\" (UniqueName: \"kubernetes.io/projected/da09a4ee-4e22-4396-a352-7bcb2b89db73-kube-api-access-nqc7n\") pod \"node-resolver-l7jxh\" (UID: \"da09a4ee-4e22-4396-a352-7bcb2b89db73\") " pod="openshift-dns/node-resolver-l7jxh" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.850202 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.868589 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.897216 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.919599 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8537808b344a3fb20a07a25558069d571f132b5a1004781b9efc0ed4d76de64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb24f25d6ddc35bcd113c137145f60606cbe656c7561eab446a5d8f3b9f5c8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"web
hook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.926830 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-l7jxh" Dec 02 18:36:28 crc kubenswrapper[4792]: W1202 18:36:28.939800 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podda09a4ee_4e22_4396_a352_7bcb2b89db73.slice/crio-766497c4d774611e38f098768ce396bc7012919ef5c725e277980ce47c750c95 WatchSource:0}: Error finding container 766497c4d774611e38f098768ce396bc7012919ef5c725e277980ce47c750c95: Status 404 returned error can't find the container with id 766497c4d774611e38f098768ce396bc7012919ef5c725e277980ce47c750c95 Dec 02 18:36:28 crc kubenswrapper[4792]: I1202 18:36:28.983611 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l7jxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da09a4ee-4e22-4396-a352-7bcb2b89db73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nqc7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l7jxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:28Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.071691 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-wpdh4"] Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.072316 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.074494 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.074718 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.074845 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.074875 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.075615 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.085885 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:29Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.109609 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:29Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.162026 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8537808b344a3fb20a07a25558069d571f132b5a1004781b9efc0ed4d76de64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb24f25d6ddc35bcd113c137145f60606cbe656c7561eab446a5d8f3b9f5c8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:2
8Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:29Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.176284 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l7jxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da09a4ee-4e22-4396-a352-7bcb2b89db73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nqc7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l7jxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:29Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 
18:36:29.189878 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f3a866a8-b9d9-4a3a-a721-9fe56db62c1f-mcd-auth-proxy-config\") pod \"machine-config-daemon-wpdh4\" (UID: \"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\") " pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.189922 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8j2px\" (UniqueName: \"kubernetes.io/projected/f3a866a8-b9d9-4a3a-a721-9fe56db62c1f-kube-api-access-8j2px\") pod \"machine-config-daemon-wpdh4\" (UID: \"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\") " pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.189964 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f3a866a8-b9d9-4a3a-a721-9fe56db62c1f-proxy-tls\") pod \"machine-config-daemon-wpdh4\" (UID: \"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\") " pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.189986 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/f3a866a8-b9d9-4a3a-a721-9fe56db62c1f-rootfs\") pod \"machine-config-daemon-wpdh4\" (UID: \"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\") " pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.199780 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d1917fa-6107-4248-a5a8-a868f9db2814\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\"
,\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:29Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.220751 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9668059a-1772-400b-b2ad-86aa3d306dd9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02
T18:36:27Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 18:36:22.337074 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 18:36:22.338216 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179137523/tls.crt::/tmp/serving-cert-2179137523/tls.key\\\\\\\"\\\\nI1202 18:36:27.868924 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 18:36:27.874780 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 18:36:27.874832 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 18:36:27.874861 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 18:36:27.874872 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 18:36:27.886080 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1202 18:36:27.886174 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 18:36:27.886187 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886221 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 18:36:27.886250 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 18:36:27.886257 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 18:36:27.886264 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 18:36:27.889226 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:29Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.238292 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:29Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.253012 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:29Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.273889 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:29Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.288249 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wpdh4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:29Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.290546 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f3a866a8-b9d9-4a3a-a721-9fe56db62c1f-mcd-auth-proxy-config\") pod \"machine-config-daemon-wpdh4\" (UID: \"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\") " pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.290615 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8j2px\" (UniqueName: \"kubernetes.io/projected/f3a866a8-b9d9-4a3a-a721-9fe56db62c1f-kube-api-access-8j2px\") pod \"machine-config-daemon-wpdh4\" (UID: \"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\") " pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.290674 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f3a866a8-b9d9-4a3a-a721-9fe56db62c1f-proxy-tls\") pod \"machine-config-daemon-wpdh4\" (UID: \"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\") " pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.290702 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/f3a866a8-b9d9-4a3a-a721-9fe56db62c1f-rootfs\") pod \"machine-config-daemon-wpdh4\" (UID: \"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\") " pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.290758 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/f3a866a8-b9d9-4a3a-a721-9fe56db62c1f-rootfs\") pod \"machine-config-daemon-wpdh4\" (UID: \"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\") " pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.292293 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f3a866a8-b9d9-4a3a-a721-9fe56db62c1f-mcd-auth-proxy-config\") pod \"machine-config-daemon-wpdh4\" (UID: \"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\") " pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.300090 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f3a866a8-b9d9-4a3a-a721-9fe56db62c1f-proxy-tls\") pod \"machine-config-daemon-wpdh4\" (UID: \"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\") " pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.324746 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8j2px\" (UniqueName: \"kubernetes.io/projected/f3a866a8-b9d9-4a3a-a721-9fe56db62c1f-kube-api-access-8j2px\") pod \"machine-config-daemon-wpdh4\" (UID: \"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\") " pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.406600 4792 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Dec 02 18:36:29 crc kubenswrapper[4792]: W1202 18:36:29.406846 4792 reflector.go:484] object-"openshift-machine-config-operator"/"openshift-service-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-machine-config-operator"/"openshift-service-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 02 18:36:29 crc kubenswrapper[4792]: W1202 18:36:29.406888 4792 reflector.go:484] object-"openshift-machine-config-operator"/"kube-root-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-machine-config-operator"/"kube-root-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 02 18:36:29 crc kubenswrapper[4792]: W1202 18:36:29.407232 4792 reflector.go:484] object-"openshift-machine-config-operator"/"proxy-tls": watch of *v1.Secret ended with: very short watch: object-"openshift-machine-config-operator"/"proxy-tls": Unexpected watch close - watch lasted less than a second and no items received Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.407272 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" Dec 02 18:36:29 crc kubenswrapper[4792]: W1202 18:36:29.407361 4792 reflector.go:484] object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq": watch of *v1.Secret ended with: very short watch: object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq": Unexpected watch close - watch lasted less than a second and no items received Dec 02 18:36:29 crc kubenswrapper[4792]: W1202 18:36:29.407420 4792 reflector.go:484] object-"openshift-dns"/"openshift-service-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-dns"/"openshift-service-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 02 18:36:29 crc kubenswrapper[4792]: W1202 18:36:29.407443 4792 reflector.go:484] object-"openshift-dns"/"node-resolver-dockercfg-kz9s7": watch of *v1.Secret ended with: very short watch: object-"openshift-dns"/"node-resolver-dockercfg-kz9s7": Unexpected watch close - watch lasted less than a second and no items received Dec 02 18:36:29 crc kubenswrapper[4792]: W1202 18:36:29.407647 4792 reflector.go:484] object-"openshift-dns"/"kube-root-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-dns"/"kube-root-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 02 18:36:29 crc kubenswrapper[4792]: W1202 18:36:29.407691 4792 reflector.go:484] object-"openshift-machine-config-operator"/"kube-rbac-proxy": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-machine-config-operator"/"kube-rbac-proxy": Unexpected watch close - watch lasted less than a second and no items received Dec 02 18:36:29 crc kubenswrapper[4792]: W1202 18:36:29.423611 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf3a866a8_b9d9_4a3a_a721_9fe56db62c1f.slice/crio-4a8c6933301b3029c519208e342d2e99151754a048414743e707c68fb7fb19b3 WatchSource:0}: Error finding container 4a8c6933301b3029c519208e342d2e99151754a048414743e707c68fb7fb19b3: Status 404 returned error can't find the container with id 4a8c6933301b3029c519208e342d2e99151754a048414743e707c68fb7fb19b3 Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.461557 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-l66ss"] Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.462302 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-l66ss" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.464005 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.464639 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.464702 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.467002 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-dw25w"] Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.467504 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-dw25w" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.467556 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.467699 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-4jhb5"] Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.468428 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.468646 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.469448 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.470947 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.471211 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.471446 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.471584 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.471789 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.471920 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.472019 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.472226 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.485601 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:29Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.492165 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:36:29 crc kubenswrapper[4792]: E1202 18:36:29.492319 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:36:31.49229646 +0000 UTC m=+22.265188788 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.500742 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-wpdh4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:29Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.511479 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d1917fa-6107-4248-a5a8-a868f9db2814\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-op
erator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:29Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.522314 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:29Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.534021 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l7jxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da09a4ee-4e22-4396-a352-7bcb2b89db73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nqc7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l7jxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:29Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.544150 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.544670 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.545915 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.546538 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.547498 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.548097 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.548790 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.551003 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.551680 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.553096 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.553649 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.554732 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.555250 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.555771 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.555776 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69235e1-c1ab-41e3-af2c-14b956c6c37b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l66ss\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:29Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.556668 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.557167 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" 
path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.558211 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.559009 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.560269 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.561431 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.562103 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.563403 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.563885 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.564832 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.565302 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.566338 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.566958 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.568376 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.568954 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.569785 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" 
path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.570431 4792 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.570553 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.572159 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.573001 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.573453 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.574995 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.575882 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9668059a-1772-400b-b2ad-86aa3d306dd9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02
T18:36:27Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 18:36:22.337074 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 18:36:22.338216 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179137523/tls.crt::/tmp/serving-cert-2179137523/tls.key\\\\\\\"\\\\nI1202 18:36:27.868924 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 18:36:27.874780 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 18:36:27.874832 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 18:36:27.874861 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 18:36:27.874872 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 18:36:27.886080 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1202 18:36:27.886174 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 18:36:27.886187 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886221 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 18:36:27.886250 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 18:36:27.886257 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 18:36:27.886264 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 18:36:27.889226 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:29Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.576023 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.576628 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.577806 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.578777 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 
18:36:29.579346 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.580611 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.581834 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.582731 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.583872 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.584588 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.585716 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.586821 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.588001 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.589130 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.589939 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:29Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.590652 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.592704 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.592761 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-log-socket\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.592810 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-host-cni-bin\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.592836 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-ovn-node-metrics-cert\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.592891 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: 
\"kubernetes.io/host-path/6925e194-2dc8-4a3a-aa76-8db41ff27997-host-run-k8s-cni-cncf-io\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w" Dec 02 18:36:29 crc kubenswrapper[4792]: E1202 18:36:29.592914 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 18:36:29 crc kubenswrapper[4792]: E1202 18:36:29.592943 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 18:36:29 crc kubenswrapper[4792]: E1202 18:36:29.592959 4792 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.592920 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/6925e194-2dc8-4a3a-aa76-8db41ff27997-multus-socket-dir-parent\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w" Dec 02 18:36:29 crc kubenswrapper[4792]: E1202 18:36:29.593017 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-02 18:36:31.592997648 +0000 UTC m=+22.365889986 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.593134 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/6925e194-2dc8-4a3a-aa76-8db41ff27997-host-var-lib-kubelet\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.593202 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.593255 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/b69235e1-c1ab-41e3-af2c-14b956c6c37b-cnibin\") pod \"multus-additional-cni-plugins-l66ss\" (UID: \"b69235e1-c1ab-41e3-af2c-14b956c6c37b\") " pod="openshift-multus/multus-additional-cni-plugins-l66ss" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.593288 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/b69235e1-c1ab-41e3-af2c-14b956c6c37b-tuning-conf-dir\") pod \"multus-additional-cni-plugins-l66ss\" (UID: \"b69235e1-c1ab-41e3-af2c-14b956c6c37b\") " pod="openshift-multus/multus-additional-cni-plugins-l66ss" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.593332 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ztfg\" (UniqueName: \"kubernetes.io/projected/b69235e1-c1ab-41e3-af2c-14b956c6c37b-kube-api-access-9ztfg\") pod \"multus-additional-cni-plugins-l66ss\" (UID: \"b69235e1-c1ab-41e3-af2c-14b956c6c37b\") " pod="openshift-multus/multus-additional-cni-plugins-l66ss" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.593339 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.593365 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/6925e194-2dc8-4a3a-aa76-8db41ff27997-os-release\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.593429 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/6925e194-2dc8-4a3a-aa76-8db41ff27997-host-run-netns\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w" Dec 02 18:36:29 crc 
kubenswrapper[4792]: E1202 18:36:29.593455 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 18:36:29 crc kubenswrapper[4792]: E1202 18:36:29.593488 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 18:36:29 crc kubenswrapper[4792]: E1202 18:36:29.593500 4792 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.593456 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-host-slash\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" Dec 02 18:36:29 crc kubenswrapper[4792]: E1202 18:36:29.593560 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-02 18:36:31.593547513 +0000 UTC m=+22.366439841 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.593606 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-var-lib-openvswitch\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.593633 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/b69235e1-c1ab-41e3-af2c-14b956c6c37b-system-cni-dir\") pod \"multus-additional-cni-plugins-l66ss\" (UID: \"b69235e1-c1ab-41e3-af2c-14b956c6c37b\") " pod="openshift-multus/multus-additional-cni-plugins-l66ss" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.593658 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/b69235e1-c1ab-41e3-af2c-14b956c6c37b-os-release\") pod \"multus-additional-cni-plugins-l66ss\" (UID: \"b69235e1-c1ab-41e3-af2c-14b956c6c37b\") " pod="openshift-multus/multus-additional-cni-plugins-l66ss" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.593679 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: 
\"kubernetes.io/configmap/b69235e1-c1ab-41e3-af2c-14b956c6c37b-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-l66ss\" (UID: \"b69235e1-c1ab-41e3-af2c-14b956c6c37b\") " pod="openshift-multus/multus-additional-cni-plugins-l66ss" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.593702 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.593782 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6925e194-2dc8-4a3a-aa76-8db41ff27997-system-cni-dir\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.593820 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-ovnkube-config\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.593844 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2vd5d\" (UniqueName: \"kubernetes.io/projected/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-kube-api-access-2vd5d\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.593873 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-host-cni-netd\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.593900 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/6925e194-2dc8-4a3a-aa76-8db41ff27997-cnibin\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.593922 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-host-run-netns\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.593943 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-etc-openvswitch\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.593965 4792 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-env-overrides\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.593988 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-ovnkube-script-lib\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.594016 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/b69235e1-c1ab-41e3-af2c-14b956c6c37b-cni-binary-copy\") pod \"multus-additional-cni-plugins-l66ss\" (UID: \"b69235e1-c1ab-41e3-af2c-14b956c6c37b\") " pod="openshift-multus/multus-additional-cni-plugins-l66ss" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.594038 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/6925e194-2dc8-4a3a-aa76-8db41ff27997-cni-binary-copy\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.594059 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/6925e194-2dc8-4a3a-aa76-8db41ff27997-host-var-lib-cni-multus\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.594081 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/6925e194-2dc8-4a3a-aa76-8db41ff27997-hostroot\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.594119 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2tjt4\" (UniqueName: \"kubernetes.io/projected/6925e194-2dc8-4a3a-aa76-8db41ff27997-kube-api-access-2tjt4\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.594174 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-systemd-units\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.594203 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-run-openvswitch\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" Dec 02 18:36:29 crc 
kubenswrapper[4792]: I1202 18:36:29.594276 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.594285 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.594333 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/6925e194-2dc8-4a3a-aa76-8db41ff27997-host-run-multus-certs\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.594360 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-node-log\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.594382 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-host-run-ovn-kubernetes\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.594753 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.595310 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 02 18:36:29 crc kubenswrapper[4792]: E1202 18:36:29.596013 4792 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 18:36:29 crc kubenswrapper[4792]: E1202 18:36:29.596136 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 18:36:31.596114781 +0000 UTC m=+22.369007109 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.596234 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6925e194-2dc8-4a3a-aa76-8db41ff27997-etc-kubernetes\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.596302 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-run-systemd\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.596358 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6925e194-2dc8-4a3a-aa76-8db41ff27997-multus-cni-dir\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.596378 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/6925e194-2dc8-4a3a-aa76-8db41ff27997-host-var-lib-cni-bin\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.596397 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/6925e194-2dc8-4a3a-aa76-8db41ff27997-multus-conf-dir\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.596567 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/6925e194-2dc8-4a3a-aa76-8db41ff27997-multus-daemon-config\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.596631 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-host-kubelet\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.596705 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-run-ovn\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" Dec 02 18:36:29 crc kubenswrapper[4792]: E1202 18:36:29.596866 4792 secret.go:188] Couldn't get secret 
openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 02 18:36:29 crc kubenswrapper[4792]: E1202 18:36:29.596946 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 18:36:31.596937992 +0000 UTC m=+22.369830320 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.608839 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8537808b344a3fb20a07a25558069d571f132b5a1004781b9efc0ed4d76de64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb24f25d6ddc35bcd113c137145f60606cbe656c7561eab446a5d8f3b9f5c8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:29Z is after 2025-08-24T17:21:41Z"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.625589 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:29Z is after 2025-08-24T17:21:41Z"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.639123 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:29Z is after 2025-08-24T17:21:41Z"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.656650 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:29Z is after 2025-08-24T17:21:41Z"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.665036 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-l7jxh" event={"ID":"da09a4ee-4e22-4396-a352-7bcb2b89db73","Type":"ContainerStarted","Data":"68e9641526f236df1982b46cbfc0583af3bc551c9ddff5bdf2006a5d6c075307"}
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.665095 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-l7jxh" event={"ID":"da09a4ee-4e22-4396-a352-7bcb2b89db73","Type":"ContainerStarted","Data":"766497c4d774611e38f098768ce396bc7012919ef5c725e277980ce47c750c95"}
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.673755 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8537808b344a3fb20a07a25558069d571f132b5a1004781b9efc0ed4d76de64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb24f25d6ddc35bcd113c137145f60606cbe656c7561eab446a5d8f3b9f5c8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:29Z is after 2025-08-24T17:21:41Z"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.675624 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"fe5777c8afe286becbd127ff2f1cc7617adff9e95a8e4061f433134bf749fa89"}
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.678628 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" event={"ID":"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f","Type":"ContainerStarted","Data":"cc533e72d865a8b2ece2e1b3ac15c56f873c2370c482ec0d1b683b343733cb06"}
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.678716 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" event={"ID":"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f","Type":"ContainerStarted","Data":"a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c"}
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.678776 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" event={"ID":"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f","Type":"ContainerStarted","Data":"4a8c6933301b3029c519208e342d2e99151754a048414743e707c68fb7fb19b3"}
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.685902 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.690830 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"2153e3d5a1b277988329aaab097b2fdb9955c4d79382b08ff93827ad69b629a1"}
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.690867 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.691059 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:29Z is after 2025-08-24T17:21:41Z"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.698995 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/b69235e1-c1ab-41e3-af2c-14b956c6c37b-system-cni-dir\") pod \"multus-additional-cni-plugins-l66ss\" (UID: \"b69235e1-c1ab-41e3-af2c-14b956c6c37b\") " pod="openshift-multus/multus-additional-cni-plugins-l66ss"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699057 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/b69235e1-c1ab-41e3-af2c-14b956c6c37b-os-release\") pod \"multus-additional-cni-plugins-l66ss\" (UID: \"b69235e1-c1ab-41e3-af2c-14b956c6c37b\") " pod="openshift-multus/multus-additional-cni-plugins-l66ss"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699093 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/b69235e1-c1ab-41e3-af2c-14b956c6c37b-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-l66ss\" (UID: \"b69235e1-c1ab-41e3-af2c-14b956c6c37b\") " pod="openshift-multus/multus-additional-cni-plugins-l66ss"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699117 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699136 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6925e194-2dc8-4a3a-aa76-8db41ff27997-system-cni-dir\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699141 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/b69235e1-c1ab-41e3-af2c-14b956c6c37b-system-cni-dir\") pod \"multus-additional-cni-plugins-l66ss\" (UID: \"b69235e1-c1ab-41e3-af2c-14b956c6c37b\") " pod="openshift-multus/multus-additional-cni-plugins-l66ss"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699157 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-ovnkube-config\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699215 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2vd5d\" (UniqueName: \"kubernetes.io/projected/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-kube-api-access-2vd5d\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699235 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-host-cni-netd\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699251 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-etc-openvswitch\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699270 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-env-overrides\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699291 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/6925e194-2dc8-4a3a-aa76-8db41ff27997-cnibin\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699309 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-host-run-netns\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699330 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-ovnkube-script-lib\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699346 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2tjt4\" (UniqueName: \"kubernetes.io/projected/6925e194-2dc8-4a3a-aa76-8db41ff27997-kube-api-access-2tjt4\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699361 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-systemd-units\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699379 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/b69235e1-c1ab-41e3-af2c-14b956c6c37b-cni-binary-copy\") pod \"multus-additional-cni-plugins-l66ss\" (UID: \"b69235e1-c1ab-41e3-af2c-14b956c6c37b\") " pod="openshift-multus/multus-additional-cni-plugins-l66ss"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699398 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/6925e194-2dc8-4a3a-aa76-8db41ff27997-cni-binary-copy\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699416 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/6925e194-2dc8-4a3a-aa76-8db41ff27997-host-var-lib-cni-multus\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699431 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/6925e194-2dc8-4a3a-aa76-8db41ff27997-hostroot\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699449 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-run-openvswitch\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699480 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/6925e194-2dc8-4a3a-aa76-8db41ff27997-host-run-multus-certs\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699496 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-node-log\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699516 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-host-run-ovn-kubernetes\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699562 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6925e194-2dc8-4a3a-aa76-8db41ff27997-etc-kubernetes\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699580 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-run-systemd\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699596 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/6925e194-2dc8-4a3a-aa76-8db41ff27997-multus-conf-dir\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699613 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/6925e194-2dc8-4a3a-aa76-8db41ff27997-multus-daemon-config\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699632 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6925e194-2dc8-4a3a-aa76-8db41ff27997-multus-cni-dir\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699647 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/6925e194-2dc8-4a3a-aa76-8db41ff27997-host-var-lib-cni-bin\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699663 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-host-kubelet\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699678 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-run-ovn\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699705 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-log-socket\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699738 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-host-cni-bin\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699754 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-ovn-node-metrics-cert\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699773 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/6925e194-2dc8-4a3a-aa76-8db41ff27997-host-run-k8s-cni-cncf-io\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699790 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/6925e194-2dc8-4a3a-aa76-8db41ff27997-multus-socket-dir-parent\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699805 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/6925e194-2dc8-4a3a-aa76-8db41ff27997-host-var-lib-kubelet\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699815 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-ovnkube-config\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699829 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/b69235e1-c1ab-41e3-af2c-14b956c6c37b-cnibin\") pod \"multus-additional-cni-plugins-l66ss\" (UID: \"b69235e1-c1ab-41e3-af2c-14b956c6c37b\") " pod="openshift-multus/multus-additional-cni-plugins-l66ss"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699845 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/6925e194-2dc8-4a3a-aa76-8db41ff27997-os-release\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699859 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/6925e194-2dc8-4a3a-aa76-8db41ff27997-host-run-netns\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699879 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/b69235e1-c1ab-41e3-af2c-14b956c6c37b-tuning-conf-dir\") pod \"multus-additional-cni-plugins-l66ss\" (UID: \"b69235e1-c1ab-41e3-af2c-14b956c6c37b\") " pod="openshift-multus/multus-additional-cni-plugins-l66ss"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699895 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ztfg\" (UniqueName: \"kubernetes.io/projected/b69235e1-c1ab-41e3-af2c-14b956c6c37b-kube-api-access-9ztfg\") pod \"multus-additional-cni-plugins-l66ss\" (UID: \"b69235e1-c1ab-41e3-af2c-14b956c6c37b\") " pod="openshift-multus/multus-additional-cni-plugins-l66ss"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699910 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-host-slash\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699925 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-var-lib-openvswitch\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.699978 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-var-lib-openvswitch\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.700077 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/b69235e1-c1ab-41e3-af2c-14b956c6c37b-os-release\") pod \"multus-additional-cni-plugins-l66ss\" (UID: \"b69235e1-c1ab-41e3-af2c-14b956c6c37b\") " pod="openshift-multus/multus-additional-cni-plugins-l66ss"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.700216 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-host-cni-netd\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.700240 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-etc-openvswitch\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.700686 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-env-overrides\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.700729 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/6925e194-2dc8-4a3a-aa76-8db41ff27997-cnibin\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.700728 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/b69235e1-c1ab-41e3-af2c-14b956c6c37b-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-l66ss\" (UID: \"b69235e1-c1ab-41e3-af2c-14b956c6c37b\") " pod="openshift-multus/multus-additional-cni-plugins-l66ss"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.700760 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.700772 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-host-run-netns\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.700922 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6925e194-2dc8-4a3a-aa76-8db41ff27997-system-cni-dir\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.701119 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6925e194-2dc8-4a3a-aa76-8db41ff27997-multus-cni-dir\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.701180 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-ovnkube-script-lib\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.701214 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/6925e194-2dc8-4a3a-aa76-8db41ff27997-host-var-lib-cni-bin\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.701238 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-host-kubelet\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.701263 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-run-ovn\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.701284 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-log-socket\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.701305 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-host-cni-bin\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.701434 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-systemd-units\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.701816 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/6925e194-2dc8-4a3a-aa76-8db41ff27997-os-release\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.701866 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/6925e194-2dc8-4a3a-aa76-8db41ff27997-host-run-k8s-cni-cncf-io\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.701901 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/6925e194-2dc8-4a3a-aa76-8db41ff27997-multus-socket-dir-parent\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.701927 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/6925e194-2dc8-4a3a-aa76-8db41ff27997-host-var-lib-kubelet\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.701953 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/b69235e1-c1ab-41e3-af2c-14b956c6c37b-cnibin\") pod \"multus-additional-cni-plugins-l66ss\" (UID: \"b69235e1-c1ab-41e3-af2c-14b956c6c37b\") " pod="openshift-multus/multus-additional-cni-plugins-l66ss"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.701979 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-node-log\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.701976 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/b69235e1-c1ab-41e3-af2c-14b956c6c37b-cni-binary-copy\") pod \"multus-additional-cni-plugins-l66ss\" (UID: \"b69235e1-c1ab-41e3-af2c-14b956c6c37b\") " pod="openshift-multus/multus-additional-cni-plugins-l66ss"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.702012 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-host-run-ovn-kubernetes\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.702057 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6925e194-2dc8-4a3a-aa76-8db41ff27997-etc-kubernetes\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.702091 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-run-systemd\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.702125 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/6925e194-2dc8-4a3a-aa76-8db41ff27997-multus-conf-dir\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.702635 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/6925e194-2dc8-4a3a-aa76-8db41ff27997-cni-binary-copy\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.702680 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/6925e194-2dc8-4a3a-aa76-8db41ff27997-host-var-lib-cni-multus\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.702704 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/6925e194-2dc8-4a3a-aa76-8db41ff27997-hostroot\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.702703 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/6925e194-2dc8-4a3a-aa76-8db41ff27997-multus-daemon-config\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.702728 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-run-openvswitch\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.702764 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/6925e194-2dc8-4a3a-aa76-8db41ff27997-host-run-multus-certs\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.702932 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/b69235e1-c1ab-41e3-af2c-14b956c6c37b-tuning-conf-dir\") pod \"multus-additional-cni-plugins-l66ss\" (UID: \"b69235e1-c1ab-41e3-af2c-14b956c6c37b\") " pod="openshift-multus/multus-additional-cni-plugins-l66ss"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.702967 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/6925e194-2dc8-4a3a-aa76-8db41ff27997-host-run-netns\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.702973 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-host-slash\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.709302 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:29Z is after 2025-08-24T17:21:41Z"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.721450 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:29Z is after 2025-08-24T17:21:41Z"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.736284 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wpdh4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:29Z is after 2025-08-24T17:21:41Z"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.750974 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-ovn-node-metrics-cert\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.751039 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9ztfg\" (UniqueName: \"kubernetes.io/projected/b69235e1-c1ab-41e3-af2c-14b956c6c37b-kube-api-access-9ztfg\") pod \"multus-additional-cni-plugins-l66ss\" (UID: \"b69235e1-c1ab-41e3-af2c-14b956c6c37b\") " pod="openshift-multus/multus-additional-cni-plugins-l66ss"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.751167 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2tjt4\" (UniqueName: \"kubernetes.io/projected/6925e194-2dc8-4a3a-aa76-8db41ff27997-kube-api-access-2tjt4\") pod \"multus-dw25w\" (UID: \"6925e194-2dc8-4a3a-aa76-8db41ff27997\") " pod="openshift-multus/multus-dw25w"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.751755 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2vd5d\" (UniqueName: \"kubernetes.io/projected/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-kube-api-access-2vd5d\") pod \"ovnkube-node-4jhb5\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5"
Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.764001 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4jhb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:29Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.779265 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d1917fa-6107-4248-a5a8-a868f9db2814\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\
\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:29Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.788352 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-l66ss" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.794947 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-dw25w" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.797117 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69235e1-c1ab-41e3-af2c-14b956c6c37b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\"
:false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID
\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l66ss\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:29Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.799388 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" Dec 02 18:36:29 crc kubenswrapper[4792]: W1202 18:36:29.800390 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb69235e1_c1ab_41e3_af2c_14b956c6c37b.slice/crio-4659b90f6cfd30c6e41268bfce1c8782351302c584425fbd5f6150736b556626 WatchSource:0}: Error finding container 4659b90f6cfd30c6e41268bfce1c8782351302c584425fbd5f6150736b556626: Status 404 returned error can't find the container with id 4659b90f6cfd30c6e41268bfce1c8782351302c584425fbd5f6150736b556626 Dec 02 18:36:29 crc kubenswrapper[4792]: W1202 18:36:29.809991 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6925e194_2dc8_4a3a_aa76_8db41ff27997.slice/crio-bef78fcf8f1d0bf3ae5bbbca24afd918052296459c0a41a36022694e4a7d6b22 WatchSource:0}: Error finding container bef78fcf8f1d0bf3ae5bbbca24afd918052296459c0a41a36022694e4a7d6b22: Status 404 returned error can't find the container with id bef78fcf8f1d0bf3ae5bbbca24afd918052296459c0a41a36022694e4a7d6b22 Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.817605 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dw25w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6925e194-2dc8-4a3a-aa76-8db41ff27997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tjt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dw25w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:29Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.829161 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l7jxh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"da09a4ee-4e22-4396-a352-7bcb2b89db73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nqc7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l7jxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:29Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.892656 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9668059a-1772-400b-b2ad-86aa3d306dd9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02
T18:36:27Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 18:36:22.337074 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 18:36:22.338216 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179137523/tls.crt::/tmp/serving-cert-2179137523/tls.key\\\\\\\"\\\\nI1202 18:36:27.868924 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 18:36:27.874780 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 18:36:27.874832 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 18:36:27.874861 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 18:36:27.874872 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 18:36:27.886080 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1202 18:36:27.886174 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 18:36:27.886187 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886221 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 18:36:27.886250 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 18:36:27.886257 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 18:36:27.886264 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 18:36:27.889226 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:29Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.936507 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:29Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.955652 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:29Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:29 crc kubenswrapper[4792]: I1202 18:36:29.986730 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:29Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:30 crc kubenswrapper[4792]: I1202 18:36:30.026546 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc533e72d865a8b2ece2e1b3ac15c56f873c2370c482ec0d1b683b343733cb06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wpdh4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:30Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:30 crc kubenswrapper[4792]: I1202 18:36:30.055805 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4jhb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:30Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:30 crc kubenswrapper[4792]: I1202 18:36:30.072790 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d1917fa-6107-4248-a5a8-a868f9db2814\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\
\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:30Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:30 crc kubenswrapper[4792]: I1202 18:36:30.083139 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l7jxh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"da09a4ee-4e22-4396-a352-7bcb2b89db73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e9641526f236df1982b46cbfc0583af3bc551c9ddff5bdf2006a5d6c075307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nqc7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l7jxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:30Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:30 crc kubenswrapper[4792]: I1202 18:36:30.101995 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69235e1-c1ab-41e3-af2c-14b956c6c37b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plu
gin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l66ss\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-02T18:36:30Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:30 crc kubenswrapper[4792]: I1202 18:36:30.115471 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dw25w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6925e194-2dc8-4a3a-aa76-8db41ff27997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tjt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}
],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dw25w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:30Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:30 crc kubenswrapper[4792]: I1202 18:36:30.128795 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:30Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:30 crc kubenswrapper[4792]: I1202 18:36:30.159553 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9668059a-1772-400b-b2ad-86aa3d306dd9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2153e3d5a1b277988329aaab097b2fdb9955c4d79382b08ff93827ad69b629a1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T18:36:27Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 18:36:22.337074 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 18:36:22.338216 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179137523/tls.crt::/tmp/serving-cert-2179137523/tls.key\\\\\\\"\\\\nI1202 18:36:27.868924 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 18:36:27.874780 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 18:36:27.874832 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 18:36:27.874861 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 18:36:27.874872 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 18:36:27.886080 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1202 18:36:27.886174 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 18:36:27.886187 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886221 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 18:36:27.886250 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 18:36:27.886257 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 18:36:27.886264 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 18:36:27.889226 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:30Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:30 crc kubenswrapper[4792]: I1202 18:36:30.178508 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:30Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:30 crc kubenswrapper[4792]: I1202 18:36:30.199888 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5777c8afe286becbd127ff2f1cc7617adff9e95a8e4061f433134bf749fa89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:30Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:30 crc kubenswrapper[4792]: I1202 18:36:30.221493 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8537808b344a3fb20a07a25558069d571f132b5a1004781b9efc0ed4d76de64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb24f25d6ddc35bcd113c137145f60606cbe656c7561eab446a5d8f3b9f5c8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:30Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:30 crc kubenswrapper[4792]: I1202 18:36:30.274787 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 02 18:36:30 crc kubenswrapper[4792]: I1202 18:36:30.538736 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:36:30 crc kubenswrapper[4792]: I1202 18:36:30.538767 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:36:30 crc kubenswrapper[4792]: I1202 18:36:30.538840 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:36:30 crc kubenswrapper[4792]: E1202 18:36:30.538893 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:36:30 crc kubenswrapper[4792]: E1202 18:36:30.539028 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:36:30 crc kubenswrapper[4792]: E1202 18:36:30.539153 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:36:30 crc kubenswrapper[4792]: I1202 18:36:30.683113 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 02 18:36:30 crc kubenswrapper[4792]: I1202 18:36:30.695058 4792 generic.go:334] "Generic (PLEG): container finished" podID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerID="e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82" exitCode=0 Dec 02 18:36:30 crc kubenswrapper[4792]: I1202 18:36:30.695193 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" event={"ID":"2f79c130-fb71-4e1c-9e2d-ef492a0acb04","Type":"ContainerDied","Data":"e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82"} Dec 02 18:36:30 crc kubenswrapper[4792]: I1202 18:36:30.695222 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" event={"ID":"2f79c130-fb71-4e1c-9e2d-ef492a0acb04","Type":"ContainerStarted","Data":"78c44b30e29d8ceb2584886c61dcbcdeeffb7ebb62ae2123f27d88326b5bf322"} Dec 02 18:36:30 crc kubenswrapper[4792]: I1202 18:36:30.699545 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-dw25w" event={"ID":"6925e194-2dc8-4a3a-aa76-8db41ff27997","Type":"ContainerStarted","Data":"c9e65c94ca8016f1dfcde2fc1262f2f78f150ada4796f7f2607221921bb3b805"} Dec 02 18:36:30 crc kubenswrapper[4792]: I1202 18:36:30.699593 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-dw25w" event={"ID":"6925e194-2dc8-4a3a-aa76-8db41ff27997","Type":"ContainerStarted","Data":"bef78fcf8f1d0bf3ae5bbbca24afd918052296459c0a41a36022694e4a7d6b22"} Dec 02 18:36:30 crc kubenswrapper[4792]: I1202 18:36:30.707984 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" event={"ID":"b69235e1-c1ab-41e3-af2c-14b956c6c37b","Type":"ContainerStarted","Data":"9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff"} Dec 02 18:36:30 crc kubenswrapper[4792]: I1202 18:36:30.708019 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" event={"ID":"b69235e1-c1ab-41e3-af2c-14b956c6c37b","Type":"ContainerStarted","Data":"4659b90f6cfd30c6e41268bfce1c8782351302c584425fbd5f6150736b556626"} Dec 02 18:36:30 crc kubenswrapper[4792]: I1202 18:36:30.708999 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l7jxh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"da09a4ee-4e22-4396-a352-7bcb2b89db73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e9641526f236df1982b46cbfc0583af3bc551c9ddff5bdf2006a5d6c075307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nqc7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l7jxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:30Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:30 crc kubenswrapper[4792]: I1202 18:36:30.737277 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69235e1-c1ab-41e3-af2c-14b956c6c37b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plu
gin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l66ss\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-02T18:36:30Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:30 crc kubenswrapper[4792]: I1202 18:36:30.749205 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dw25w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6925e194-2dc8-4a3a-aa76-8db41ff27997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tjt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}
],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dw25w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:30Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:30 crc kubenswrapper[4792]: I1202 18:36:30.769288 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9668059a-1772-400b-b2ad-86aa3d306dd9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://347659c5e446
ceedca48f408f524fecace4a5273276847a6dd204a139064239f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2153e3d5a1b277988329aaab097b2fdb9955c4d79382b08ff93827ad69b629a1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T18:36:27Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 18:36:22.337074 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 18:36:22.338216 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179137523/tls.crt::/tmp/serving-cert-2179137523/tls.key\\\\\\\"\\\\nI1202 18:36:27.868924 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 18:36:27.874780 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 18:36:27.874832 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 18:36:27.874861 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 18:36:27.874872 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 18:36:27.886080 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1202 18:36:27.886174 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 18:36:27.886187 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886221 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 18:36:27.886250 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 18:36:27.886257 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 18:36:27.886264 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 18:36:27.889226 1 cmd.go:182] pods 
\\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:30Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:30 crc kubenswrapper[4792]: I1202 18:36:30.782403 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:30Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:30 crc kubenswrapper[4792]: I1202 18:36:30.789009 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 02 18:36:30 crc kubenswrapper[4792]: I1202 18:36:30.795297 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:30Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:30 crc kubenswrapper[4792]: I1202 18:36:30.810174 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5777c8afe286becbd127ff2f1cc7617adff9e95a8e4061f433134bf749fa89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:30Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:30 crc kubenswrapper[4792]: I1202 18:36:30.828819 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8537808b344a3fb20a07a25558069d571f132b5a1004781b9efc0ed4d76de64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb24f25d6ddc35bcd113c137145f60606cbe656c7561eab446a5d8f3b9f5c8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:30Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:30 crc kubenswrapper[4792]: I1202 18:36:30.834474 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 02 18:36:30 crc kubenswrapper[4792]: I1202 18:36:30.852542 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d1917fa-6107-4248-a5a8-a868f9db2814\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\
\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:30Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:30 crc kubenswrapper[4792]: I1202 18:36:30.856258 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 02 18:36:30 crc kubenswrapper[4792]: I1202 18:36:30.866376 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 02 18:36:30 crc kubenswrapper[4792]: I1202 18:36:30.867663 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:30Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:30 crc kubenswrapper[4792]: I1202 18:36:30.887143 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:30Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:30 crc kubenswrapper[4792]: I1202 18:36:30.904503 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc533e72d865a8b2ece2e1b3ac15c56f873c2370c482ec0d1b683b343733cb06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wpdh4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:30Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:30 crc kubenswrapper[4792]: I1202 18:36:30.922580 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 02 18:36:30 crc kubenswrapper[4792]: I1202 18:36:30.931111 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4jhb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:30Z 
is after 2025-08-24T17:21:41Z" Dec 02 18:36:30 crc kubenswrapper[4792]: I1202 18:36:30.946671 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l7jxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da09a4ee-4e22-4396-a352-7bcb2b89db73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e9641526f236df1982b46cbfc0583af3bc551c9ddff5bdf2006a5d6c075307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nqc7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l7jxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:30Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:30 crc kubenswrapper[4792]: I1202 18:36:30.968247 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69235e1-c1ab-41e3-af2c-14b956c6c37b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"n
ame\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2
025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l66ss\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:30Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:30 crc kubenswrapper[4792]: I1202 18:36:30.977089 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 02 18:36:30 crc kubenswrapper[4792]: I1202 18:36:30.994128 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dw25w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6925e194-2dc8-4a3a-aa76-8db41ff27997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9e65c94ca8016f1dfcde2fc1262f2f78f150ada4796f7f2607221921bb3b805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,
\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tjt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dw25w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:30Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.012747 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9668059a-1772-400b-b2ad-86aa3d306dd9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2153e3d5a1b277988329aaab097b2fdb9955c4d79382b08ff93827ad69b629a1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T18:36:27Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 18:36:22.337074 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 18:36:22.338216 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179137523/tls.crt::/tmp/serving-cert-2179137523/tls.key\\\\\\\"\\\\nI1202 18:36:27.868924 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 18:36:27.874780 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 18:36:27.874832 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 18:36:27.874861 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 18:36:27.874872 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 18:36:27.886080 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1202 18:36:27.886174 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 18:36:27.886187 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886221 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 18:36:27.886250 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 18:36:27.886257 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 18:36:27.886264 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 18:36:27.889226 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:31Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.035848 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:31Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.055989 4792 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.058351 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.058514 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.058650 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.058923 4792 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.069848 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:31Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.076122 4792 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.076832 4792 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.082777 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.082814 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.082824 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.082845 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.082861 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:31Z","lastTransitionTime":"2025-12-02T18:36:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.114949 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5777c8afe286becbd127ff2f1cc7617adff9e95a8e4061f433134bf749fa89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:31Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:31 crc kubenswrapper[4792]: E1202 18:36:31.126717 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1ed8f756-400f-4462-b5a1-c3a97e79306e\\\",\\\"systemUUID\\\":\\\"4
d48cf4e-c99d-43e6-acd7-ad269c0425b2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:31Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.134574 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.134793 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.134877 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.134971 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.135049 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:31Z","lastTransitionTime":"2025-12-02T18:36:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.137329 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8537808b344a3fb20a07a25558069d571f132b5a1004781b9efc0ed4d76de64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb24f25d6ddc35bcd113c137145f60606cbe656c7561eab446a5d8f3b9f5c8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:31Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.155608 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc533e72d865a8b2ece2e1b3ac15c56f873c2370c482ec0d1b683b343733cb06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wpdh4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:31Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:31 crc kubenswrapper[4792]: E1202 18:36:31.158562 4792 kubelet_node_status.go:585] "Error 
updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256
:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"si
zeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":46317936
5},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1ed8f756-400f-4462-b5a1-c3a97e79306e\\\",\\\"systemUUID\\\":\\\"4d48cf4e-c99d-43e6-acd7-ad269c0425b2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:31Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.162323 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.162364 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.162377 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.162398 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.162412 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:31Z","lastTransitionTime":"2025-12-02T18:36:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.173665 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d2
7dfe118c874b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4jhb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:31Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:31 crc kubenswrapper[4792]: E1202 18:36:31.178269 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1ed8f756-400f-4462-b5a1-c3a97e79306e\\\",\\\"systemUUID\\\":\\\"4d48cf4e-c99d-43e6-acd7-ad269c0425b2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:31Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.181205 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.181242 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.181259 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.181287 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.181304 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:31Z","lastTransitionTime":"2025-12-02T18:36:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.188475 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d1917fa-6107-4248-a5a8-a868f9db2814\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\"
,\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:31Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:31 crc kubenswrapper[4792]: E1202 18:36:31.192311 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1ed8f756-400f-4462-b5a1-c3a97e79306e\\\",\\\"systemUUID\\\":\\\"4d48cf4e-c99d-43e6-acd7-ad269c0425b2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:31Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.197996 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.198027 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.198039 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.198060 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.198074 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:31Z","lastTransitionTime":"2025-12-02T18:36:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.202571 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:31Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:31 crc kubenswrapper[4792]: E1202 18:36:31.212830 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1ed8f756-400f-4462-b5a1-c3a97e79306e\\\",\\\"systemUUID\\\":\\\"4d48cf4e-c99d-43e6-acd7-ad269c0425b2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:31Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:31 crc kubenswrapper[4792]: E1202 18:36:31.212952 4792 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.213219 4792 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:31Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.214489 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.214513 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.214547 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.214563 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.214576 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:31Z","lastTransitionTime":"2025-12-02T18:36:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.319793 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.319848 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.319856 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.319871 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.319881 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:31Z","lastTransitionTime":"2025-12-02T18:36:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.422573 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.422618 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.422631 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.422650 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.422661 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:31Z","lastTransitionTime":"2025-12-02T18:36:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.525300 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.525362 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.525373 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.525398 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.525412 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:31Z","lastTransitionTime":"2025-12-02T18:36:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.525944 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:36:31 crc kubenswrapper[4792]: E1202 18:36:31.526254 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:36:35.526213807 +0000 UTC m=+26.299106165 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.626780 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.626843 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.626876 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.626899 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:36:31 crc kubenswrapper[4792]: E1202 18:36:31.626976 4792 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 18:36:31 crc kubenswrapper[4792]: E1202 18:36:31.627062 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-12-02 18:36:35.627041229 +0000 UTC m=+26.399933577 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 18:36:31 crc kubenswrapper[4792]: E1202 18:36:31.627068 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 18:36:31 crc kubenswrapper[4792]: E1202 18:36:31.627089 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 18:36:31 crc kubenswrapper[4792]: E1202 18:36:31.627103 4792 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 18:36:31 crc kubenswrapper[4792]: E1202 18:36:31.627163 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-02 18:36:35.627144322 +0000 UTC m=+26.400036650 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 18:36:31 crc kubenswrapper[4792]: E1202 18:36:31.627201 4792 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 18:36:31 crc kubenswrapper[4792]: E1202 18:36:31.627224 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 18:36:35.627218504 +0000 UTC m=+26.400110832 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 18:36:31 crc kubenswrapper[4792]: E1202 18:36:31.627268 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 18:36:31 crc kubenswrapper[4792]: E1202 18:36:31.627277 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 18:36:31 crc kubenswrapper[4792]: E1202 18:36:31.627284 4792 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 18:36:31 crc kubenswrapper[4792]: E1202 18:36:31.627309 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-02 18:36:35.627302706 +0000 UTC m=+26.400195024 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.627668 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.627698 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.627708 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.627726 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.627738 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:31Z","lastTransitionTime":"2025-12-02T18:36:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.713028 4792 generic.go:334] "Generic (PLEG): container finished" podID="b69235e1-c1ab-41e3-af2c-14b956c6c37b" containerID="9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff" exitCode=0 Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.713139 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" event={"ID":"b69235e1-c1ab-41e3-af2c-14b956c6c37b","Type":"ContainerDied","Data":"9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff"} Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.718560 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" event={"ID":"2f79c130-fb71-4e1c-9e2d-ef492a0acb04","Type":"ContainerStarted","Data":"04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd"} Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.718618 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" event={"ID":"2f79c130-fb71-4e1c-9e2d-ef492a0acb04","Type":"ContainerStarted","Data":"8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b"} Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.718634 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" event={"ID":"2f79c130-fb71-4e1c-9e2d-ef492a0acb04","Type":"ContainerStarted","Data":"4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b"} Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.718646 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" event={"ID":"2f79c130-fb71-4e1c-9e2d-ef492a0acb04","Type":"ContainerStarted","Data":"b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159"} Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.718656 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" event={"ID":"2f79c130-fb71-4e1c-9e2d-ef492a0acb04","Type":"ContainerStarted","Data":"dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c"} Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.718666 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" event={"ID":"2f79c130-fb71-4e1c-9e2d-ef492a0acb04","Type":"ContainerStarted","Data":"38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab"} Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.721416 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"63bfb707ccc4df1bf37c121b2c44c2e74bbd4c0ba5daec8a6b5e9b0d91c0f6d2"} Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.730303 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.730350 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.730360 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.730381 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 
18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.730399 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:31Z","lastTransitionTime":"2025-12-02T18:36:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.739831 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dw25w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6925e194-2dc8-4a3a-aa76-8db41ff27997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9e65c94ca8016f1dfcde2fc1262f2f78f150ada4796f7f2607221921bb3b805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",
\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tjt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dw25w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:31Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.752011 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l7jxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da09a4ee-4e22-4396-a352-7bcb2b89db73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e9641526f236df1982b46cbfc0583af3bc551c9ddff5bdf2006a5d6c075307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nqc7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l7jxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-02T18:36:31Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.780829 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69235e1-c1ab-41e3-af2c-14b956c6c37b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/
var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readO
nly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l66ss\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:31Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.802355 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9668059a-1772-400b-b2ad-86aa3d306dd9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2153e3d5a1b277988329aaab097b2fdb9955c4d79382b08ff93827ad69b629a1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T18:36:27Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 18:36:22.337074 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 18:36:22.338216 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179137523/tls.crt::/tmp/serving-cert-2179137523/tls.key\\\\\\\"\\\\nI1202 18:36:27.868924 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 18:36:27.874780 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 18:36:27.874832 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 18:36:27.874861 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 18:36:27.874872 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 18:36:27.886080 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1202 18:36:27.886174 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 18:36:27.886187 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886221 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 18:36:27.886250 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 18:36:27.886257 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 18:36:27.886264 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 18:36:27.889226 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:31Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.816143 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:31Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.829688 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8537808b344a3fb20a07a25558069d571f132b5a1004781b9efc0ed4d76de64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb24f25d6ddc35bcd113c137145f60606cbe656c7561eab446a5d8f3b9f5c8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:31Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.832867 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.832901 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.832910 4792 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.832930 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.832942 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:31Z","lastTransitionTime":"2025-12-02T18:36:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.844000 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:31Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.860766 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5777c8afe286becbd127ff2f1cc7617adff9e95a8e4061f433134bf749fa89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:31Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.869008 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-qrlwg"] Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.871484 4792 util.go:30] "No sandbox for pod can be found. 
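The lastState blocks in these patches record exitCode 137 with reason ContainerStatusUnknown and the message "The container could not be located when the pod was deleted. The container used to be Running": the runtime lost track of the container across the restart, so the kubelet synthesizes a killed exit (128 + SIGKILL 9 = 137). Because the whole patch is logged as a Go-quoted string, pulling one of these payloads apart takes an unquote step before the JSON decode; a short sketch over a trimmed copy of the fragment:

```go
package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

func main() {
	// A trimmed copy of one lastState from the patches above, as it
	// appears inside the log line: a Go-quoted JSON string.
	quoted := `"{\"lastState\":{\"terminated\":{\"exitCode\":137,\"finishedAt\":null,\"message\":\"The container could not be located when the pod was deleted. The container used to be Running\",\"reason\":\"ContainerStatusUnknown\",\"startedAt\":null}}}"`
	// First peel off the outer quoting, then parse the JSON that remains.
	raw, err := strconv.Unquote(quoted)
	if err != nil {
		panic(err)
	}
	var status struct {
		LastState struct {
			Terminated struct {
				ExitCode int    `json:"exitCode"`
				Reason   string `json:"reason"`
				Message  string `json:"message"`
			} `json:"terminated"`
		} `json:"lastState"`
	}
	if err := json.Unmarshal([]byte(raw), &status); err != nil {
		panic(err)
	}
	t := status.LastState.Terminated
	// 137 = 128 + SIGKILL(9): the runtime could no longer locate the
	// container, so a killed terminal state is recorded.
	fmt.Printf("exitCode=%d reason=%s message=%q\n", t.ExitCode, t.Reason, t.Message)
}
```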
Need to start a new one" pod="openshift-image-registry/node-ca-qrlwg" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.874263 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.874492 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.874649 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.875433 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.877429 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:31Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.890810 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc533e72d865a8b2ece2e1b3ac15c56f873c2370c482ec0d1b683b343733cb06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wpdh4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:31Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.917316 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4jhb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:31Z 
is after 2025-08-24T17:21:41Z"
Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.931968 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/85a6200e-64b5-4e6b-bd19-933e3b576bfa-host\") pod \"node-ca-qrlwg\" (UID: \"85a6200e-64b5-4e6b-bd19-933e3b576bfa\") " pod="openshift-image-registry/node-ca-qrlwg"
Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.932006 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/85a6200e-64b5-4e6b-bd19-933e3b576bfa-serviceca\") pod \"node-ca-qrlwg\" (UID: \"85a6200e-64b5-4e6b-bd19-933e3b576bfa\") " pod="openshift-image-registry/node-ca-qrlwg"
Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.932035 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4jt4k\" (UniqueName: \"kubernetes.io/projected/85a6200e-64b5-4e6b-bd19-933e3b576bfa-kube-api-access-4jt4k\") pod \"node-ca-qrlwg\" (UID: \"85a6200e-64b5-4e6b-bd19-933e3b576bfa\") " pod="openshift-image-registry/node-ca-qrlwg"
Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.935804 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.935889 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.935907 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.935942 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.935961 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:31Z","lastTransitionTime":"2025-12-02T18:36:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
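The NodeNotReady event and the Ready=False node condition recorded here come straight from the runtime's network status: until the network provider writes a CNI config file into /etc/kubernetes/cni/net.d/, the runtime answers NetworkReady=false and the kubelet demotes the node with reason KubeletNotReady. A rough sketch of that directory probe; treating .conf, .conflist, and .json as config files is an assumption modeled on common CNI loaders:

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	// The directory the kubelet message names; empty (or missing) means
	// the network plugin has not written its configuration yet.
	confDir := "/etc/kubernetes/cni/net.d"
	entries, err := os.ReadDir(confDir)
	if err != nil {
		fmt.Printf("NetworkReady=false: cannot read %s: %v\n", confDir, err)
		return
	}
	var confs []string
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json": // extensions CNI loaders commonly accept
			confs = append(confs, e.Name())
		}
	}
	if len(confs) == 0 {
		fmt.Println("NetworkReady=false: no CNI configuration file found; has your network provider started?")
		return
	}
	fmt.Printf("NetworkReady=true: found %v\n", confs)
}
```

The directory staying empty is consistent with ovnkube-node-4jhb5 above still sitting in PodInitializing, so the condition keeps reasserting itself on every status-update cycle.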
Has your network provider started?"} Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.937090 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d1917fa-6107-4248-a5a8-a868f9db2814\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"
restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:31Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.952986 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:31Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.968774 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63bfb707ccc4df1bf37c121b2c44c2e74bbd4c0ba5daec8a6b5e9b0d91c0f6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:31Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:31 crc kubenswrapper[4792]: I1202 18:36:31.983993 4792 status_manager.go:875] "Failed to update status for pod" 
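Each of these "failed to patch status" payloads is a strategic merge patch from the kubelet status manager. The $setElementOrder/conditions directive lists every condition type in its intended order, keyed by the type merge key, while the conditions array itself carries only the entries whose fields changed; the server merges by key and then reorders. A sketch of how such a payload is shaped, with values loosely copied from the approver patch earlier (illustrative, not kubelet source):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Minimal shapes for the fragment of PodStatus the patches above carry.
type condRef struct {
	Type string `json:"type"`
}

type condition struct {
	Type               string `json:"type"`
	Status             string `json:"status,omitempty"`
	Reason             string `json:"reason,omitempty"`
	Message            string `json:"message,omitempty"`
	LastTransitionTime string `json:"lastTransitionTime,omitempty"`
}

func main() {
	patch := map[string]any{
		"metadata": map[string]any{"uid": "ef543e1b-8068-4ea3-b32a-61027b32e95d"},
		"status": map[string]any{
			// Full, ordered list of merge keys: tells the strategic-merge
			// logic how to order the conditions array after the patch.
			"$setElementOrder/conditions": []condRef{
				{"PodReadyToStartContainers"}, {"Initialized"},
				{"Ready"}, {"ContainersReady"}, {"PodScheduled"},
			},
			// Only entries that actually changed are listed here; they
			// are matched to existing ones by the "type" merge key.
			"conditions": []condition{
				{Type: "Ready", Status: "True", LastTransitionTime: "2025-12-02T18:36:28Z"},
			},
		},
	}
	out, _ := json.Marshal(patch)
	fmt.Println(string(out))
}
```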
pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc533e72d865a8b2ece2e1b3ac15c56f873c2370c482ec0d1b683b343733cb06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wpdh4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:31Z is after 2025-08-24T17:21:41Z" Dec 02 
18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.013351 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reas
on\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabl
ed\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"image\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4jhb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:32Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.031817 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d1917fa-6107-4248-a5a8-a868f9db2814\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\"
,\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:32Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.033473 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/85a6200e-64b5-4e6b-bd19-933e3b576bfa-host\") pod \"node-ca-qrlwg\" (UID: \"85a6200e-64b5-4e6b-bd19-933e3b576bfa\") " pod="openshift-image-registry/node-ca-qrlwg" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.033575 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/85a6200e-64b5-4e6b-bd19-933e3b576bfa-serviceca\") pod \"node-ca-qrlwg\" (UID: \"85a6200e-64b5-4e6b-bd19-933e3b576bfa\") " pod="openshift-image-registry/node-ca-qrlwg" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.033637 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4jt4k\" (UniqueName: \"kubernetes.io/projected/85a6200e-64b5-4e6b-bd19-933e3b576bfa-kube-api-access-4jt4k\") pod \"node-ca-qrlwg\" (UID: \"85a6200e-64b5-4e6b-bd19-933e3b576bfa\") " pod="openshift-image-registry/node-ca-qrlwg" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.033647 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/85a6200e-64b5-4e6b-bd19-933e3b576bfa-host\") pod \"node-ca-qrlwg\" (UID: \"85a6200e-64b5-4e6b-bd19-933e3b576bfa\") " pod="openshift-image-registry/node-ca-qrlwg" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.034903 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/85a6200e-64b5-4e6b-bd19-933e3b576bfa-serviceca\") pod \"node-ca-qrlwg\" (UID: \"85a6200e-64b5-4e6b-bd19-933e3b576bfa\") " pod="openshift-image-registry/node-ca-qrlwg" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.038417 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.038471 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.038490 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.038565 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.038590 4792 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:32Z","lastTransitionTime":"2025-12-02T18:36:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.048198 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:32Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.059504 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4jt4k\" (UniqueName: \"kubernetes.io/projected/85a6200e-64b5-4e6b-bd19-933e3b576bfa-kube-api-access-4jt4k\") pod \"node-ca-qrlwg\" (UID: \"85a6200e-64b5-4e6b-bd19-933e3b576bfa\") " pod="openshift-image-registry/node-ca-qrlwg" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.065019 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dw25w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6925e194-2dc8-4a3a-aa76-8db41ff27997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9e65c94ca8016f1dfcde2fc1262f2f78f150ada4796f7f2607221921bb3b805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\
\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tjt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dw25w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:32Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.082712 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qrlwg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85a6200e-64b5-4e6b-bd19-933e3b576bfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jt4k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qrlwg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:32Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.097276 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l7jxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da09a4ee-4e22-4396-a352-7bcb2b89db73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e9641526f236df1982b46cbfc0583af3bc551c9ddff5bdf2006a5d6c075307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nqc7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"i
p\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l7jxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:32Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.112671 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69235e1-c1ab-41e3-af2c-14b956c6c37b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l66ss\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:32Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:32 crc 
kubenswrapper[4792]: I1202 18:36:32.132223 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9668059a-1772-400b-b2ad-86aa3d306dd9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"runnin
g\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2153e3d5a1b277988329aaab097b2fdb9955c4d79382b08ff93827ad69b629a1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T18:36:27Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 18:36:22.337074 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 18:36:22.338216 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179137523/tls.crt::/tmp/serving-cert-2179137523/tls.key\\\\\\\"\\\\nI1202 18:36:27.868924 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 18:36:27.874780 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 18:36:27.874832 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 18:36:27.874861 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 18:36:27.874872 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 18:36:27.886080 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1202 18:36:27.886174 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 18:36:27.886187 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886221 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 18:36:27.886250 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 18:36:27.886257 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 18:36:27.886264 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 18:36:27.889226 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:32Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.141037 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.141082 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.141090 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.141108 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.141119 4792 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:32Z","lastTransitionTime":"2025-12-02T18:36:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.145902 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:32Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.159720 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8537808b344a3fb20a07a25558069d571f132b5a1004781b9efc0ed4d76de64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb24f25d6ddc35bcd113c137145f60606cbe656c7561eab446a5d8f3b9f5c8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:32Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.172137 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:32Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.189767 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-qrlwg" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.190374 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5777c8afe286becbd127ff2f1cc7617adff9e95a8e4061f433134bf749fa89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:32Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.244339 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.244396 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.244409 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.244430 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.244445 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:32Z","lastTransitionTime":"2025-12-02T18:36:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.347751 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.347808 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.347828 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.347852 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.347875 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:32Z","lastTransitionTime":"2025-12-02T18:36:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.450642 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.450695 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.450707 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.450730 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.450742 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:32Z","lastTransitionTime":"2025-12-02T18:36:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.538622 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.538692 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:36:32 crc kubenswrapper[4792]: E1202 18:36:32.538744 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:36:32 crc kubenswrapper[4792]: E1202 18:36:32.538877 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.538995 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:36:32 crc kubenswrapper[4792]: E1202 18:36:32.539093 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.553026 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.553054 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.553062 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.553075 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.553084 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:32Z","lastTransitionTime":"2025-12-02T18:36:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.657154 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.657207 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.657217 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.657235 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.657246 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:32Z","lastTransitionTime":"2025-12-02T18:36:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.729336 4792 generic.go:334] "Generic (PLEG): container finished" podID="b69235e1-c1ab-41e3-af2c-14b956c6c37b" containerID="d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63" exitCode=0 Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.729463 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" event={"ID":"b69235e1-c1ab-41e3-af2c-14b956c6c37b","Type":"ContainerDied","Data":"d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63"} Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.732210 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-qrlwg" event={"ID":"85a6200e-64b5-4e6b-bd19-933e3b576bfa","Type":"ContainerStarted","Data":"d3415f5b847f72aad5d88625823e8241334b103ab38a6450f9118b46058e96b4"} Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.732307 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-qrlwg" event={"ID":"85a6200e-64b5-4e6b-bd19-933e3b576bfa","Type":"ContainerStarted","Data":"c3e9d223693274e81cfdb8331ead68b813b620233b89e48ed128dc8052d9937f"} Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.747040 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:32Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.763117 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.763753 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.763768 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.763789 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.763806 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:32Z","lastTransitionTime":"2025-12-02T18:36:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.782371 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5777c8afe286becbd127ff2f1cc7617adff9e95a8e4061f433134bf749fa89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:32Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.800850 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8537808b344a3fb20a07a25558069d571f132b5a1004781b9efc0ed4d76de64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb24f25d6ddc35bcd113c137145f60606cbe656c7561eab446a5d8f3b9f5c8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:32Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.827197 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4jhb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:32Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.841516 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d1917fa-6107-4248-a5a8-a868f9db2814\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\"
,\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:32Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.855375 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:32Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.867759 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.867806 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.867815 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.867834 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.867844 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:32Z","lastTransitionTime":"2025-12-02T18:36:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.868607 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63bfb707ccc4df1bf37c121b2c44c2e74bbd4c0ba5daec8a6b5e9b0d91c0f6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:32Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.881211 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc533e72d865a8b2ece2e1b3ac15c56f873c2370c482ec0d1b683b343733cb06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wpdh4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:32Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.892132 4792 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-dns/node-resolver-l7jxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da09a4ee-4e22-4396-a352-7bcb2b89db73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e9641526f236df1982b46cbfc0583af3bc551c9ddff5bdf2006a5d6c075307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nqc7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l7jxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:32Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.906349 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69235e1-c1ab-41e3-af2c-14b956c6c37b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":
{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l66ss\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:32Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.919096 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dw25w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6925e194-2dc8-4a3a-aa76-8db41ff27997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9e65c94ca8016f1dfcde2fc1262f2f78f150ada4796f7f2607221921bb3b805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\
\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tjt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dw25w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:32Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.928939 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qrlwg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85a6200e-64b5-4e6b-bd19-933e3b576bfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jt4k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qrlwg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:32Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.942022 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9668059a-1772-400b-b2ad-86aa3d306dd9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2153e3d5a1b277988329aaab097b2fdb9955c4d79382b08ff93827ad69b629a1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T18:36:27Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 18:36:22.337074 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 18:36:22.338216 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179137523/tls.crt::/tmp/serving-cert-2179137523/tls.key\\\\\\\"\\\\nI1202 18:36:27.868924 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 18:36:27.874780 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 18:36:27.874832 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 18:36:27.874861 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 18:36:27.874872 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 18:36:27.886080 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1202 18:36:27.886174 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 18:36:27.886187 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886221 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 18:36:27.886250 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 18:36:27.886257 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 18:36:27.886264 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 18:36:27.889226 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:32Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.956934 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:32Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.971179 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.971259 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.971274 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.971294 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.971310 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:32Z","lastTransitionTime":"2025-12-02T18:36:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.974623 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:32Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:32 crc kubenswrapper[4792]: I1202 18:36:32.988321 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5777c8afe286becbd127ff2f1cc7617adff9e95a8e4061f433134bf749fa89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:32Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.002381 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8537808b344a3fb20a07a25558069d571f132b5a1004781b9efc0ed4d76de64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb24f25d6ddc35bcd113c137145f60606cbe656c7561eab446a5d8f3b9f5c8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:33Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.016578 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d1917fa-6107-4248-a5a8-a868f9db2814\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\
\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:33Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.029450 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:33Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.041624 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63bfb707ccc4df1bf37c121b2c44c2e74bbd4c0ba5daec8a6b5e9b0d91c0f6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:33Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.053415 4792 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc533e72d865a8b2ece2e1b3ac15c56f873c2370c482ec0d1b683b343733cb06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wpdh4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:33Z is after 2025-08-24T17:21:41Z" Dec 02 
18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.071309 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reas
on\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabl
ed\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"image\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4jhb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:33Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.073936 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.073979 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.073993 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.074013 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.074025 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:33Z","lastTransitionTime":"2025-12-02T18:36:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.085326 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l7jxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da09a4ee-4e22-4396-a352-7bcb2b89db73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e9641526f236df1982b46cbfc0583af3bc551c9ddff5bdf2006a5d6c075307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nqc7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l7jxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:33Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.112665 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69235e1-c1ab-41e3-af2c-14b956c6c37b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\
":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,
\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l66ss\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:33Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.145551 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dw25w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6925e194-2dc8-4a3a-aa76-8db41ff27997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9e65c94ca8016f1dfcde2fc1262f2f78f150ada4796f7f2607221921bb3b805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/li
b/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tjt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dw25w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:33Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.176501 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.176570 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.176583 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.176603 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.176620 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:33Z","lastTransitionTime":"2025-12-02T18:36:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.189954 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qrlwg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85a6200e-64b5-4e6b-bd19-933e3b576bfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3415f5b847f72aad5d88625823e8241334b103ab38a6450f9118b46058e96b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jt4k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qrlwg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:33Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.233441 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9668059a-1772-400b-b2ad-86aa3d306dd9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2153e3d5a1b277988329aaab097b2fdb9955c4d79382b08ff93827ad69b629a1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T18:36:27Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 18:36:22.337074 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 18:36:22.338216 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179137523/tls.crt::/tmp/serving-cert-2179137523/tls.key\\\\\\\"\\\\nI1202 18:36:27.868924 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 18:36:27.874780 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 18:36:27.874832 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 18:36:27.874861 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 18:36:27.874872 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 18:36:27.886080 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1202 18:36:27.886174 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 18:36:27.886187 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886221 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 18:36:27.886250 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 18:36:27.886257 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 18:36:27.886264 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 18:36:27.889226 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:33Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.264955 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:33Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.279588 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.279650 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.279668 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.279693 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.279715 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:33Z","lastTransitionTime":"2025-12-02T18:36:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.382404 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.382460 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.382478 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.382501 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.382552 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:33Z","lastTransitionTime":"2025-12-02T18:36:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.485137 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.485196 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.485214 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.485244 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.485266 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:33Z","lastTransitionTime":"2025-12-02T18:36:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.588461 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.588583 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.588610 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.588644 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.588666 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:33Z","lastTransitionTime":"2025-12-02T18:36:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.692153 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.692206 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.692221 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.692245 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.692258 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:33Z","lastTransitionTime":"2025-12-02T18:36:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.740181 4792 generic.go:334] "Generic (PLEG): container finished" podID="b69235e1-c1ab-41e3-af2c-14b956c6c37b" containerID="ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648" exitCode=0 Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.740287 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" event={"ID":"b69235e1-c1ab-41e3-af2c-14b956c6c37b","Type":"ContainerDied","Data":"ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648"} Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.746579 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" event={"ID":"2f79c130-fb71-4e1c-9e2d-ef492a0acb04","Type":"ContainerStarted","Data":"8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d"} Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.760382 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be 
located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:33Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.781004 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5777c8afe286becbd127ff2f1cc7617adff9e95a8e4061f433134bf749fa89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:33Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.794883 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.794949 4792 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.794967 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.794996 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.795016 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:33Z","lastTransitionTime":"2025-12-02T18:36:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.803190 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8537808b344a3fb20a07a25558069d571f132b5a1004781b9efc0ed4d76de64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb24f25d6ddc35bcd113c137145f60606cbe656c7561eab446a5d8f3b9f5c8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\
"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:33Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.821587 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d1917fa-6107-4248-a5a8-a868f9db2814\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"r
esource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:33Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.836948 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:33Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.853089 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63bfb707ccc4df1bf37c121b2c44c2e74bbd4c0ba5daec8a6b5e9b0d91c0f6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:33Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.867251 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc533e72d865a8b2ece2e1b3ac15c56f873c2370c482ec0d1b683b343733cb06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\
\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wpdh4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:33Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.892092 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4jhb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:33Z 
is after 2025-08-24T17:21:41Z" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.898300 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.898360 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.898378 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.898405 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.898422 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:33Z","lastTransitionTime":"2025-12-02T18:36:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.906193 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l7jxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da09a4ee-4e22-4396-a352-7bcb2b89db73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e9641526f236df1982b46cbfc0583af3bc551c9ddff5bdf2006a5d6c075307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nqc7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l7jxh\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:33Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.925934 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69235e1-c1ab-41e3-af2c-14b956c6c37b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mou
ntPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\"
:\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l66ss\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:33Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.946847 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dw25w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6925e194-2dc8-4a3a-aa76-8db41ff27997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9e65c94ca8016f1dfcde2fc1262f2f78f150ada4796f7f2607221921bb3b805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tjt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dw25w\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:33Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.963666 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qrlwg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85a6200e-64b5-4e6b-bd19-933e3b576bfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3415f5b847f72aad5d88625823e8241334b103ab38a6450f9118b46058e96b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jt4k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qrlwg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:33Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:33 crc kubenswrapper[4792]: I1202 18:36:33.984008 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9668059a-1772-400b-b2ad-86aa3d306dd9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2153e3d5a1b277988329aaab097b2fdb9955c4d79382b08ff93827ad69b629a1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T18:36:27Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 18:36:22.337074 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 18:36:22.338216 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179137523/tls.crt::/tmp/serving-cert-2179137523/tls.key\\\\\\\"\\\\nI1202 18:36:27.868924 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 18:36:27.874780 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 18:36:27.874832 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 18:36:27.874861 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 18:36:27.874872 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 18:36:27.886080 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1202 18:36:27.886174 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 18:36:27.886187 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886221 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 18:36:27.886250 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 18:36:27.886257 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 18:36:27.886264 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 18:36:27.889226 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:33Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.002123 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.002662 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.002674 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.002692 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.002711 4792 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:34Z","lastTransitionTime":"2025-12-02T18:36:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.002840 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:34Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.105161 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.105194 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.105202 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.105221 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.105231 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:34Z","lastTransitionTime":"2025-12-02T18:36:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.211026 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.211067 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.211097 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.211119 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.211130 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:34Z","lastTransitionTime":"2025-12-02T18:36:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.314175 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.314252 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.314271 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.314300 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.314320 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:34Z","lastTransitionTime":"2025-12-02T18:36:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.419126 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.419178 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.419196 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.419250 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.419263 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:34Z","lastTransitionTime":"2025-12-02T18:36:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.523293 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.523336 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.523344 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.523362 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.523372 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:34Z","lastTransitionTime":"2025-12-02T18:36:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.539110 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:36:34 crc kubenswrapper[4792]: E1202 18:36:34.539337 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.539972 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:36:34 crc kubenswrapper[4792]: E1202 18:36:34.540070 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.540142 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:36:34 crc kubenswrapper[4792]: E1202 18:36:34.540230 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.626604 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.626681 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.626699 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.626732 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.626759 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:34Z","lastTransitionTime":"2025-12-02T18:36:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.728630 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.732101 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.732179 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.732198 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.732227 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.732249 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:34Z","lastTransitionTime":"2025-12-02T18:36:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.736864 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.748911 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l7jxh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"da09a4ee-4e22-4396-a352-7bcb2b89db73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e9641526f236df1982b46cbfc0583af3bc551c9ddff5bdf2006a5d6c075307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nqc7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l7jxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:34Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.757428 4792 generic.go:334] "Generic (PLEG): container finished" podID="b69235e1-c1ab-41e3-af2c-14b956c6c37b" containerID="b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba" exitCode=0 Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.757618 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" event={"ID":"b69235e1-c1ab-41e3-af2c-14b956c6c37b","Type":"ContainerDied","Data":"b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba"} Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.776659 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69235e1-c1ab-41e3-af2c-14b956c6c37b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"image\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev
@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l66ss\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:34Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.797941 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dw25w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6925e194-2dc8-4a3a-aa76-8db41ff27997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9e65c94ca8016f1dfcde2fc1262f2f78f150ada4796f7f2607221921bb3b805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tjt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dw25w\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:34Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.817040 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qrlwg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85a6200e-64b5-4e6b-bd19-933e3b576bfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3415f5b847f72aad5d88625823e8241334b103ab38a6450f9118b46058e96b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jt4k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qrlwg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:34Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.836059 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.836132 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.836151 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:34 crc 
kubenswrapper[4792]: I1202 18:36:34.836181 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.836201 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:34Z","lastTransitionTime":"2025-12-02T18:36:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.841340 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:34Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.863750 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9668059a-1772-400b-b2ad-86aa3d306dd9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2153e3d5a1b277988329aaab097b2fdb9955c4d79382b08ff93827ad69b629a1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T18:36:27Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 18:36:22.337074 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 18:36:22.338216 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179137523/tls.crt::/tmp/serving-cert-2179137523/tls.key\\\\\\\"\\\\nI1202 18:36:27.868924 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 18:36:27.874780 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 18:36:27.874832 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 18:36:27.874861 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 18:36:27.874872 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 18:36:27.886080 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1202 18:36:27.886174 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 18:36:27.886187 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886221 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 18:36:27.886250 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 18:36:27.886257 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 18:36:27.886264 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 18:36:27.889226 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:34Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.883741 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:34Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.904516 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5777c8afe286becbd127ff2f1cc7617adff9e95a8e4061f433134bf749fa89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:34Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.919822 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8537808b344a3fb20a07a25558069d571f132b5a1004781b9efc0ed4d76de64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb24f25d6ddc35bcd113c137145f60606cbe656c7561eab446a5d8f3b9f5c8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:34Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.937109 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:34Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.940374 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.940481 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.940503 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.940586 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.940609 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:34Z","lastTransitionTime":"2025-12-02T18:36:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.956702 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63bfb707ccc4df1bf37c121b2c44c2e74bbd4c0ba5daec8a6b5e9b0d91c0f6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:34Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:34 crc kubenswrapper[4792]: I1202 18:36:34.974429 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc533e72d865a8b2ece2e1b3ac15c56f873c2370c482ec0d1b683b343733cb06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wpdh4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:34Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.009226 4792 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4jhb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:35Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.027580 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d1917fa-6107-4248-a5a8-a868f9db2814\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",
\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:35Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.045858 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.045915 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.045931 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.045958 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.045976 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:35Z","lastTransitionTime":"2025-12-02T18:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.050667 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9668059a-1772-400b-b2ad-86aa3d306dd9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2153e3d5a1b277988329aaab097b2fdb9955c4d79382b08ff93827ad69b629a1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T18:36:27Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 18:36:22.337074 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 18:36:22.338216 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179137523/tls.crt::/tmp/serving-cert-2179137523/tls.key\\\\\\\"\\\\nI1202 18:36:27.868924 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 18:36:27.874780 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 18:36:27.874832 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 18:36:27.874861 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 18:36:27.874872 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 18:36:27.886080 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1202 18:36:27.886174 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 18:36:27.886187 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886221 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 18:36:27.886250 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 18:36:27.886257 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 18:36:27.886264 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 18:36:27.889226 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:35Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.069976 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:35Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.084918 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:35Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.100674 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5777c8afe286becbd127ff2f1cc7617adff9e95a8e4061f433134bf749fa89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:35Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.115323 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8537808b344a3fb20a07a25558069d571f132b5a1004781b9efc0ed4d76de64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb24f25d6ddc35bcd113c137145f60606cbe656c7561eab446a5d8f3b9f5c8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:35Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.132029 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d1917fa-6107-4248-a5a8-a868f9db2814\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:35Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.150742 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:35Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.150883 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.150932 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.150949 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.150975 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.150994 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:35Z","lastTransitionTime":"2025-12-02T18:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.168957 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63bfb707ccc4df1bf37c121b2c44c2e74bbd4c0ba5daec8a6b5e9b0d91c0f6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:35Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.185406 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc533e72d865a8b2ece2e1b3ac15c56f873c2370c482ec0d1b683b343733cb06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wpdh4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:35Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.217692 4792 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4jhb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:35Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.239933 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l7jxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da09a4ee-4e22-4396-a352-7bcb2b89db73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e9641526f236df1982b46cbfc0583af3bc551c9ddff5bdf2006a5d6c075307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nqc7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\
\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l7jxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:35Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.253766 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.253801 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.253812 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.253832 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.253847 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:35Z","lastTransitionTime":"2025-12-02T18:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.263290 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69235e1-c1ab-41e3-af2c-14b956c6c37b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l66ss\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:35Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.285894 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dw25w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6925e194-2dc8-4a3a-aa76-8db41ff27997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9e65c94ca8016f1dfcde2fc1262f2f78f150ada4796f7f2607221921bb3b805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8
s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tjt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dw25w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:35Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.305426 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qrlwg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"85a6200e-64b5-4e6b-bd19-933e3b576bfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3415f5b847f72aad5d88625823e8241334b103ab38a6450f9118b46058e96b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jt4k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qrlwg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:35Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.357272 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.357365 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.357384 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.357409 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.357429 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:35Z","lastTransitionTime":"2025-12-02T18:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.461015 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.461087 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.461112 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.461140 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.461157 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:35Z","lastTransitionTime":"2025-12-02T18:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.564982 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.565047 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.565060 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.565078 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.565089 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:35Z","lastTransitionTime":"2025-12-02T18:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.577365 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:36:35 crc kubenswrapper[4792]: E1202 18:36:35.577568 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:36:43.577537925 +0000 UTC m=+34.350430263 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.668471 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.668577 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.668597 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.668625 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.668644 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:35Z","lastTransitionTime":"2025-12-02T18:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.679337 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.679428 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.679476 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.679582 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:36:35 crc kubenswrapper[4792]: E1202 18:36:35.679630 4792 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object 
"openshift-network-console"/"networking-console-plugin" not registered Dec 02 18:36:35 crc kubenswrapper[4792]: E1202 18:36:35.679696 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 18:36:35 crc kubenswrapper[4792]: E1202 18:36:35.679724 4792 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 18:36:35 crc kubenswrapper[4792]: E1202 18:36:35.679738 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 18:36:35 crc kubenswrapper[4792]: E1202 18:36:35.679762 4792 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 18:36:35 crc kubenswrapper[4792]: E1202 18:36:35.679812 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 18:36:35 crc kubenswrapper[4792]: E1202 18:36:35.679865 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 18:36:35 crc kubenswrapper[4792]: E1202 18:36:35.679881 4792 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 18:36:35 crc kubenswrapper[4792]: E1202 18:36:35.680373 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 18:36:43.679761284 +0000 UTC m=+34.452653652 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 18:36:35 crc kubenswrapper[4792]: E1202 18:36:35.680436 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 18:36:43.680406701 +0000 UTC m=+34.453299069 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 18:36:35 crc kubenswrapper[4792]: E1202 18:36:35.680465 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-02 18:36:43.680451832 +0000 UTC m=+34.453344200 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 18:36:35 crc kubenswrapper[4792]: E1202 18:36:35.680489 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-02 18:36:43.680477492 +0000 UTC m=+34.453369860 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.764401 4792 generic.go:334] "Generic (PLEG): container finished" podID="b69235e1-c1ab-41e3-af2c-14b956c6c37b" containerID="740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b" exitCode=0 Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.764447 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" event={"ID":"b69235e1-c1ab-41e3-af2c-14b956c6c37b","Type":"ContainerDied","Data":"740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b"} Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.770845 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.770893 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.770908 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.770931 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.770947 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:35Z","lastTransitionTime":"2025-12-02T18:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.788542 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5777c8afe286becbd127ff2f1cc7617adff9e95a8e4061f433134bf749fa89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:35Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.803758 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8537808b344a3fb20a07a25558069d571f132b5a1004781b9efc0ed4d76de64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb24f25d6ddc35bcd113c137145f60606cbe656c7561eab446a5d8f3b9f5c8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:35Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.824562 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:35Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.840040 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:35Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.856845 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63bfb707ccc4df1bf37c121b2c44c2e74bbd4c0ba5daec8a6b5e9b0d91c0f6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:35Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.869710 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc533e72d865a8b2ece2e1b3ac15c56f873c2370c482ec0d1b683b343733cb06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\
\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wpdh4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:35Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.874371 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.874567 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.874594 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.874640 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.874655 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:35Z","lastTransitionTime":"2025-12-02T18:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.893159 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4jhb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:35Z 
is after 2025-08-24T17:21:41Z" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.907459 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d1917fa-6107-4248-a5a8-a868f9db2814\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\
\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:35Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.925079 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69235e1-c1ab-41e3-af2c-14b956c6c37b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l66ss\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:35Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.941150 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dw25w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6925e194-2dc8-4a3a-aa76-8db41ff27997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9e65c94ca8016f1dfcde2fc1262f2f78f150ada4796f7f2607221921bb3b805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tjt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dw25w\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:35Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.950982 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qrlwg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85a6200e-64b5-4e6b-bd19-933e3b576bfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3415f5b847f72aad5d88625823e8241334b103ab38a6450f9118b46058e96b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jt4k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qrlwg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:35Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.965165 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l7jxh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"da09a4ee-4e22-4396-a352-7bcb2b89db73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e9641526f236df1982b46cbfc0583af3bc551c9ddff5bdf2006a5d6c075307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nqc7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l7jxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:35Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.978699 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.978840 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.978857 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.978911 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.978932 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:35Z","lastTransitionTime":"2025-12-02T18:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:35 crc kubenswrapper[4792]: I1202 18:36:35.988459 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9668059a-1772-400b-b2ad-86aa3d306dd9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-
apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2153e3d5a1b277988329aaab097b2fdb9955c4d79382b08ff93827ad69b629a1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T18:36:27Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 18:36:22.337074 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 18:36:22.338216 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179137523/tls.crt::/tmp/serving-cert-2179137523/tls.key\\\\\\\"\\\\nI1202 18:36:27.868924 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 18:36:27.874780 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 18:36:27.874832 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 18:36:27.874861 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 18:36:27.874872 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 18:36:27.886080 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1202 18:36:27.886174 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 18:36:27.886187 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886221 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 18:36:27.886250 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 18:36:27.886257 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 18:36:27.886264 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 18:36:27.889226 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:35Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.007371 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:36Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.083543 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.083584 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.083594 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.083616 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.083643 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:36Z","lastTransitionTime":"2025-12-02T18:36:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.187104 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.187153 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.187165 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.187182 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.187192 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:36Z","lastTransitionTime":"2025-12-02T18:36:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.291964 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.292025 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.292071 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.292090 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.292100 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:36Z","lastTransitionTime":"2025-12-02T18:36:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.395383 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.395443 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.395462 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.395578 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.395607 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:36Z","lastTransitionTime":"2025-12-02T18:36:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.499350 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.499395 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.499406 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.499444 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.499456 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:36Z","lastTransitionTime":"2025-12-02T18:36:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.539331 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.539437 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.539591 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 18:36:36 crc kubenswrapper[4792]: E1202 18:36:36.539940 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 18:36:36 crc kubenswrapper[4792]: E1202 18:36:36.540579 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 18:36:36 crc kubenswrapper[4792]: E1202 18:36:36.540696 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.602387 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.602440 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.602457 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.602480 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.602496 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:36Z","lastTransitionTime":"2025-12-02T18:36:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.704840 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.704886 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.704902 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.704921 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.704935 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:36Z","lastTransitionTime":"2025-12-02T18:36:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.777370 4792 generic.go:334] "Generic (PLEG): container finished" podID="b69235e1-c1ab-41e3-af2c-14b956c6c37b" containerID="4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d" exitCode=0 Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.777512 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" event={"ID":"b69235e1-c1ab-41e3-af2c-14b956c6c37b","Type":"ContainerDied","Data":"4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d"} Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.784215 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" event={"ID":"2f79c130-fb71-4e1c-9e2d-ef492a0acb04","Type":"ContainerStarted","Data":"b57172dfec029a70b682d16da6df34bb415e50b707a80f542d2bc92c875ea18c"} Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.784635 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.801779 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dw25w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6925e194-2dc8-4a3a-aa76-8db41ff27997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9e65c94ca8016f1dfcde2fc1262f2f78f150ada4796f7f2607221921bb3b805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"m
ountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tjt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dw25w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:36Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.806908 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.806942 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.806952 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.806972 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.806981 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:36Z","lastTransitionTime":"2025-12-02T18:36:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.807345 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.814798 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qrlwg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85a6200e-64b5-4e6b-bd19-933e3b576bfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3415f5b847f72aad5d88625823e8241334b103ab38a6450f9118b46058e96b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jt4k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qrlwg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:36Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.829430 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l7jxh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"da09a4ee-4e22-4396-a352-7bcb2b89db73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e9641526f236df1982b46cbfc0583af3bc551c9ddff5bdf2006a5d6c075307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nqc7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l7jxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:36Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.847662 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69235e1-c1ab-41e3-af2c-14b956c6c37b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://740
346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l66ss\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:36Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.861962 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9668059a-1772-400b-b2ad-86aa3d306dd9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2153e3d5a1b277988329aaab097b2fdb9955c4d79382b08ff93827ad69b629a1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T18:36:27Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 18:36:22.337074 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 18:36:22.338216 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179137523/tls.crt::/tmp/serving-cert-2179137523/tls.key\\\\\\\"\\\\nI1202 18:36:27.868924 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 18:36:27.874780 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 18:36:27.874832 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 18:36:27.874861 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 18:36:27.874872 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 18:36:27.886080 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1202 18:36:27.886174 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 18:36:27.886187 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886221 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 18:36:27.886250 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 18:36:27.886257 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 18:36:27.886264 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 18:36:27.889226 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:36Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.875412 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:36Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.888715 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8537808b344a3fb20a07a25558069d571f132b5a1004781b9efc0ed4d76de64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb24f25d6ddc35bcd113c137145f60606cbe656c7561eab446a5d8f3b9f5c8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:36Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.903109 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:36Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.909444 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.909484 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.909493 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.909512 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.909554 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:36Z","lastTransitionTime":"2025-12-02T18:36:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.916147 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5777c8afe286becbd127ff2f1cc7617adff9e95a8e4061f433134bf749fa89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:36Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.926691 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63bfb707ccc4df1bf37c121b2c44c2e74bbd4c0ba5daec8a6b5e9b0d91c0f6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:36Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.943559 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc533e72d865a8b2ece2e1b3ac15c56f873c2370c482ec0d1b683b343733cb06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wpdh4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:36Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.971318 4792 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4jhb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:36Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.985255 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d1917fa-6107-4248-a5a8-a868f9db2814\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay
.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:36Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:36 crc kubenswrapper[4792]: I1202 18:36:36.997004 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:36Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.011503 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.011552 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.011563 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.011579 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.011592 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:37Z","lastTransitionTime":"2025-12-02T18:36:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.012150 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9668059a-1772-400b-b2ad-86aa3d306dd9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2153e3d5a1b277988329aaab097b2fdb9955c4d79382b08ff93827ad69b629a1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T18:36:27Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 18:36:22.337074 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 18:36:22.338216 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179137523/tls.crt::/tmp/serving-cert-2179137523/tls.key\\\\\\\"\\\\nI1202 18:36:27.868924 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 18:36:27.874780 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 18:36:27.874832 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 18:36:27.874861 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 18:36:27.874872 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 18:36:27.886080 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1202 18:36:27.886174 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 18:36:27.886187 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886221 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 18:36:27.886250 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 18:36:27.886257 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 18:36:27.886264 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 18:36:27.889226 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:37Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.025659 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:37Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.041687 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:37Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.055832 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5777c8afe286becbd127ff2f1cc7617adff9e95a8e4061f433134bf749fa89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:37Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.068951 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8537808b344a3fb20a07a25558069d571f132b5a1004781b9efc0ed4d76de64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb24f25d6ddc35bcd113c137145f60606cbe656c7561eab446a5d8f3b9f5c8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:37Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.081536 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d1917fa-6107-4248-a5a8-a868f9db2814\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:37Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.093757 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:37Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.105574 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63bfb707ccc4df1bf37c121b2c44c2e74bbd4c0ba5daec8a6b5e9b0d91c0f6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:37Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.114717 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:37 
crc kubenswrapper[4792]: I1202 18:36:37.114788 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.114802 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.114843 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.114856 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:37Z","lastTransitionTime":"2025-12-02T18:36:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.115468 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc533e72d865a8b2ece2e1b3ac15c56f873c2370c482ec0d1b683b343733cb06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"na
me\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wpdh4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:37Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.135257 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b57172dfec029a70b682d16da6df34bb415e50b7
07a80f542d2bc92c875ea18c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4jhb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:37Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.148559 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l7jxh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"da09a4ee-4e22-4396-a352-7bcb2b89db73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e9641526f236df1982b46cbfc0583af3bc551c9ddff5bdf2006a5d6c075307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nqc7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l7jxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:37Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.175622 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69235e1-c1ab-41e3-af2c-14b956c6c37b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://740
346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l66ss\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:37Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.193988 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dw25w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6925e194-2dc8-4a3a-aa76-8db41ff27997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9e65c94ca8016f1dfcde2fc1262f2f78f150ada4796f7f2607221921bb3b805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tjt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dw25w\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:37Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.208789 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qrlwg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85a6200e-64b5-4e6b-bd19-933e3b576bfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3415f5b847f72aad5d88625823e8241334b103ab38a6450f9118b46058e96b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jt4k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qrlwg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:37Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.217589 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.217645 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.217663 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:37 crc 
kubenswrapper[4792]: I1202 18:36:37.217690 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.217708 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:37Z","lastTransitionTime":"2025-12-02T18:36:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.320822 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.320897 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.320925 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.320955 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.320978 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:37Z","lastTransitionTime":"2025-12-02T18:36:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.423580 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.423622 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.423633 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.423649 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.423661 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:37Z","lastTransitionTime":"2025-12-02T18:36:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.527076 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.527132 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.527150 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.527175 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.527193 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:37Z","lastTransitionTime":"2025-12-02T18:36:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.630685 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.630737 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.630749 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.630767 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.630780 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:37Z","lastTransitionTime":"2025-12-02T18:36:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.733906 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.733973 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.733996 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.734034 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.734052 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:37Z","lastTransitionTime":"2025-12-02T18:36:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.791160 4792 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.792008 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.836856 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.837206 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.837244 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.837255 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.837272 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.837284 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:37Z","lastTransitionTime":"2025-12-02T18:36:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.858753 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc533e72d865a8b2ece2e1b3ac15c56f873c2370c482ec0d1b683b343733cb06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wpdh4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:37Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.894373 4792 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b57172dfec029a70b682d16da6df34bb415e50b707a80f542d2bc92c875ea18c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount
\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4jhb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:37Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.918664 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d1917fa-6107-4248-a5a8-a868f9db2814\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:37Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.940584 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.940661 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.940694 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.940726 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.940747 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:37Z","lastTransitionTime":"2025-12-02T18:36:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.943161 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:37Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.960896 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63bfb707ccc4df1bf37c121b2c44c2e74bbd4c0ba5daec8a6b5e9b0d91c0f6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:37Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:37 crc kubenswrapper[4792]: I1202 18:36:37.978792 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qrlwg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"85a6200e-64b5-4e6b-bd19-933e3b576bfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3415f5b847f72aad5d88625823e8241334b103ab38a6450f9118b46058e96b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jt4k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qrlwg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:37Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.001680 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l7jxh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"da09a4ee-4e22-4396-a352-7bcb2b89db73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e9641526f236df1982b46cbfc0583af3bc551c9ddff5bdf2006a5d6c075307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nqc7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l7jxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:37Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.019382 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69235e1-c1ab-41e3-af2c-14b956c6c37b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://740
346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l66ss\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:38Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.038727 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dw25w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6925e194-2dc8-4a3a-aa76-8db41ff27997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9e65c94ca8016f1dfcde2fc1262f2f78f150ada4796f7f2607221921bb3b805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tjt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dw25w\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:38Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.044050 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.044106 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.044124 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.044329 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.044343 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:38Z","lastTransitionTime":"2025-12-02T18:36:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.059401 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9668059a-1772-400b-b2ad-86aa3d306dd9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2153e3d5a1b277988329aaab097b2fdb9955c4d79382b08ff93827ad69b629a1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T18:36:27Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 18:36:22.337074 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 18:36:22.338216 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179137523/tls.crt::/tmp/serving-cert-2179137523/tls.key\\\\\\\"\\\\nI1202 18:36:27.868924 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 18:36:27.874780 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 18:36:27.874832 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 18:36:27.874861 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 18:36:27.874872 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 18:36:27.886080 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1202 18:36:27.886174 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 18:36:27.886187 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886221 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 18:36:27.886250 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 18:36:27.886257 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 18:36:27.886264 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 18:36:27.889226 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:38Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.077370 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:38Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.094905 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:38Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.111036 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5777c8afe286becbd127ff2f1cc7617adff9e95a8e4061f433134bf749fa89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:38Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.131865 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8537808b344a3fb20a07a25558069d571f132b5a1004781b9efc0ed4d76de64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb24f25d6ddc35bcd113c137145f60606cbe656c7561eab446a5d8f3b9f5c8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:38Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.148125 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.148182 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.148201 4792 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.148229 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.148255 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:38Z","lastTransitionTime":"2025-12-02T18:36:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.251208 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.251285 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.251304 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.251330 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.251351 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:38Z","lastTransitionTime":"2025-12-02T18:36:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.355441 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.355511 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.355560 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.355588 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.355606 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:38Z","lastTransitionTime":"2025-12-02T18:36:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.459580 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.459678 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.459699 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.459726 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.459743 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:38Z","lastTransitionTime":"2025-12-02T18:36:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.539818 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.539860 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.539818 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:36:38 crc kubenswrapper[4792]: E1202 18:36:38.540043 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:36:38 crc kubenswrapper[4792]: E1202 18:36:38.540174 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:36:38 crc kubenswrapper[4792]: E1202 18:36:38.540355 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.562439 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.562501 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.562547 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.562572 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.562591 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:38Z","lastTransitionTime":"2025-12-02T18:36:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.664897 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.664955 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.664968 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.664987 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.664999 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:38Z","lastTransitionTime":"2025-12-02T18:36:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.774015 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.774072 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.774090 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.774110 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.774122 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:38Z","lastTransitionTime":"2025-12-02T18:36:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.801981 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" event={"ID":"b69235e1-c1ab-41e3-af2c-14b956c6c37b","Type":"ContainerStarted","Data":"dddaff8d1d17ef199fd8e33d481a528de305f885915e7db4033dc0e64a94f669"} Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.802070 4792 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.822152 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9668059a-1772-400b-b2ad-86aa3d306dd9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2153e3d5a1b277988329aaab097b2fdb9955c4d79382b08ff93827ad69b629a1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T18:36:27Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 18:36:22.337074 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 18:36:22.338216 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179137523/tls.crt::/tmp/serving-cert-2179137523/tls.key\\\\\\\"\\\\nI1202 18:36:27.868924 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 18:36:27.874780 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 18:36:27.874832 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 18:36:27.874861 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 18:36:27.874872 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 18:36:27.886080 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1202 18:36:27.886174 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 18:36:27.886187 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886221 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 18:36:27.886250 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 18:36:27.886257 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 18:36:27.886264 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 18:36:27.889226 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:38Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.839671 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:38Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.855803 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:38Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.868409 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5777c8afe286becbd127ff2f1cc7617adff9e95a8e4061f433134bf749fa89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:38Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.876449 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.876497 4792 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.876509 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.876550 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.876565 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:38Z","lastTransitionTime":"2025-12-02T18:36:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.880039 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8537808b344a3fb20a07a25558069d571f132b5a1004781b9efc0ed4d76de64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb24f25d6ddc35bcd113c137145f60606cbe656c7561eab446a5d8f3b9f5c8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-ide
ntity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:38Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.891451 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d1917fa-6107-4248-a5a8-a868f9db2814\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-oper
ator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:38Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.901325 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:38Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.911745 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63bfb707ccc4df1bf37c121b2c44c2e74bbd4c0ba5daec8a6b5e9b0d91c0f6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:38Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.921816 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc533e72d865a8b2ece2e1b3ac15c56f873c2370c482ec0d1b683b343733cb06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\
\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wpdh4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:38Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.937963 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\
":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serv
iceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b57172dfec029a70b682d16da6df34bb415e50b707a80f542d2bc92c875ea18c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{
\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4jhb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:38Z is after 2025-08-24T17:21:41Z" Dec 02 
18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.952217 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l7jxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da09a4ee-4e22-4396-a352-7bcb2b89db73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e9641526f236df1982b46cbfc0583af3bc551c9ddff5bdf2006a5d6c075307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nqc7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l7jxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:38Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.966765 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69235e1-c1ab-41e3-af2c-14b956c6c37b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddaff8d1d17ef199fd8e33d481a528de305f885915e7db4033dc0e64a94f669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l66ss\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:38Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.978808 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.978867 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:38 crc 
kubenswrapper[4792]: I1202 18:36:38.978881 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.978904 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.978917 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:38Z","lastTransitionTime":"2025-12-02T18:36:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.982638 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dw25w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6925e194-2dc8-4a3a-aa76-8db41ff27997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9e65c94ca8016f1dfcde2fc1262f2f78f150ada4796f7f2607221921bb3b805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"n
ame\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tjt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dw25w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:38Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:38 crc kubenswrapper[4792]: I1202 18:36:38.992461 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qrlwg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85a6200e-64b5-4e6b-bd19-933e3b576bfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3415f5b847f72aad5d88625823e8241334b103ab38a6450f9118b46058e96b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jt4k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.12
6.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qrlwg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:38Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.081304 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.081349 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.081362 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.081378 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.081389 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:39Z","lastTransitionTime":"2025-12-02T18:36:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.183956 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.183999 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.184007 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.184024 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.184034 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:39Z","lastTransitionTime":"2025-12-02T18:36:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.288364 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.288431 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.288444 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.288464 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.288478 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:39Z","lastTransitionTime":"2025-12-02T18:36:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.391651 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.391706 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.391720 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.391740 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.391754 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:39Z","lastTransitionTime":"2025-12-02T18:36:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.494460 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.494588 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.494605 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.494633 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.494652 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:39Z","lastTransitionTime":"2025-12-02T18:36:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.561361 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8537808b344a3fb20a07a25558069d571f132b5a1004781b9efc0ed4d76de64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb24f25d6ddc35bcd113c137145f60606cbe656c7561eab446a5d8f3b9f5c8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:39Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.580433 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:39Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.598037 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.598210 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.598239 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.598282 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.598310 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:39Z","lastTransitionTime":"2025-12-02T18:36:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.601938 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5777c8afe286becbd127ff2f1cc7617adff9e95a8e4061f433134bf749fa89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:39Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.623381 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63bfb707ccc4df1bf37c121b2c44c2e74bbd4c0ba5daec8a6b5e9b0d91c0f6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:39Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.641984 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc533e72d865a8b2ece2e1b3ac15c56f873c2370c482ec0d1b683b343733cb06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wpdh4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:39Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.674690 4792 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b57172dfec029a70b682d16da6df34bb415e50b707a80f542d2bc92c875ea18c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount
\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4jhb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:39Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.691590 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d1917fa-6107-4248-a5a8-a868f9db2814\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:39Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.702383 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.702698 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.702819 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.702924 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.703023 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:39Z","lastTransitionTime":"2025-12-02T18:36:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.709018 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:39Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.730738 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dw25w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6925e194-2dc8-4a3a-aa76-8db41ff27997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9e65c94ca8016f1dfcde2fc1262f2f78f150ada4796f7f2607221921bb3b805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tjt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dw25w\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:39Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.744363 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qrlwg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85a6200e-64b5-4e6b-bd19-933e3b576bfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3415f5b847f72aad5d88625823e8241334b103ab38a6450f9118b46058e96b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jt4k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qrlwg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:39Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.756680 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l7jxh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"da09a4ee-4e22-4396-a352-7bcb2b89db73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e9641526f236df1982b46cbfc0583af3bc551c9ddff5bdf2006a5d6c075307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nqc7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l7jxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:39Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.773296 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69235e1-c1ab-41e3-af2c-14b956c6c37b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddaff8d1d17ef199fd8e33d481a528de305f885915e7db4033dc0e64a94f669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l66ss\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:39Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.793487 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9668059a-1772-400b-b2ad-86aa3d306dd9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2153e3d5a1b277988329aaab097b2fdb9955c4d79382b08ff93827ad69b629a1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T18:36:27Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 18:36:22.337074 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 18:36:22.338216 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179137523/tls.crt::/tmp/serving-cert-2179137523/tls.key\\\\\\\"\\\\nI1202 18:36:27.868924 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 18:36:27.874780 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 18:36:27.874832 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 18:36:27.874861 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 18:36:27.874872 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 18:36:27.886080 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1202 18:36:27.886174 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 18:36:27.886187 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886221 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 18:36:27.886250 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 18:36:27.886257 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 18:36:27.886264 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 18:36:27.889226 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:39Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.805196 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.805787 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.805867 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.805932 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.805990 4792 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:39Z","lastTransitionTime":"2025-12-02T18:36:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.808561 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4jhb5_2f79c130-fb71-4e1c-9e2d-ef492a0acb04/ovnkube-controller/0.log" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.813368 4792 generic.go:334] "Generic (PLEG): container finished" podID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerID="b57172dfec029a70b682d16da6df34bb415e50b707a80f542d2bc92c875ea18c" exitCode=1 Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.813462 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" event={"ID":"2f79c130-fb71-4e1c-9e2d-ef492a0acb04","Type":"ContainerDied","Data":"b57172dfec029a70b682d16da6df34bb415e50b707a80f542d2bc92c875ea18c"} Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.814883 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:39Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.815012 4792 scope.go:117] "RemoveContainer" containerID="b57172dfec029a70b682d16da6df34bb415e50b707a80f542d2bc92c875ea18c" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.840303 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69235e1-c1ab-41e3-af2c-14b956c6c37b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddaff8d1d17ef199fd8e33d481a528de305f885915e7db4033dc0e64a94f669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096
b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"
mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-
02T18:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l66ss\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:39Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.858128 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dw25w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6925e194-2dc8-4a3a-aa76-8db41ff27997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9e65c94ca8016f1dfcde2fc1262f2f78f150ada4796f7f2607221921bb3b805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"m
ountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tjt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dw25w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:39Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.872068 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qrlwg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85a6200e-64b5-4e6b-bd19-933e3b576bfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3415f5b847f72aad5d88625823e8241334b103ab38a6450f9118b46058e96b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jt4k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168
.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qrlwg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:39Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.884828 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l7jxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da09a4ee-4e22-4396-a352-7bcb2b89db73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e9641526f236df1982b46cbfc0583af3bc551c9ddff5bdf2006a5d6c075307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nqc7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l7jxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:39Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.898634 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9668059a-1772-400b-b2ad-86aa3d306dd9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2153e3d5a1b277988329aaab097b2fdb9955c4d79382b08ff93827ad69b629a1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T18:36:27Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 18:36:22.337074 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 18:36:22.338216 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179137523/tls.crt::/tmp/serving-cert-2179137523/tls.key\\\\\\\"\\\\nI1202 18:36:27.868924 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 18:36:27.874780 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 18:36:27.874832 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 18:36:27.874861 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 18:36:27.874872 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 18:36:27.886080 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1202 18:36:27.886174 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 18:36:27.886187 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886221 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 18:36:27.886250 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 18:36:27.886257 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 18:36:27.886264 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 18:36:27.889226 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:39Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.909265 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.909327 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.909345 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.909369 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.909388 4792 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:39Z","lastTransitionTime":"2025-12-02T18:36:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.916141 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:39Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.937569 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5777c8afe286becbd127ff2f1cc7617adff9e95a8e4061f433134bf749fa89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:39Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.954887 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8537808b344a3fb20a07a25558069d571f132b5a1004781b9efc0ed4d76de64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb24f25d6ddc35bcd113c137145f60606cbe656c7561eab446a5d8f3b9f5c8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:39Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:39 crc kubenswrapper[4792]: I1202 18:36:39.980398 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:39Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:39.996902 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:39Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.012552 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63bfb707ccc4df1bf37c121b2c44c2e74bbd4c0ba5daec8a6b5e9b0d91c0f6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:40Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.017393 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.017484 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.017505 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.017549 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.017637 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:40Z","lastTransitionTime":"2025-12-02T18:36:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.026693 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc533e72d865a8b2ece2e1b3ac15c56f873c2370c482ec0d1b683b343733cb06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"k
ube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wpdh4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:40Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.050079 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b57172dfec029a70b682d16da6df34bb415e50b7
07a80f542d2bc92c875ea18c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b57172dfec029a70b682d16da6df34bb415e50b707a80f542d2bc92c875ea18c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T18:36:39Z\\\",\\\"message\\\":\\\"1.NetworkPolicy event handler 4 for removal\\\\nI1202 18:36:39.223547 6077 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1202 18:36:39.223597 6077 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1202 18:36:39.223620 6077 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1202 18:36:39.223657 6077 factory.go:656] Stopping watch factory\\\\nI1202 18:36:39.223685 6077 handler.go:208] Removed *v1.Node event handler 7\\\\nI1202 18:36:39.223709 6077 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1202 18:36:39.223734 6077 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1202 18:36:39.223756 6077 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1202 18:36:39.223779 6077 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1202 18:36:39.223801 6077 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1202 18:36:39.223824 6077 handler.go:208] Removed *v1.Node event handler 2\\\\nI1202 18:36:39.223912 6077 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1202 18:36:39.224471 6077 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c8
74b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4jhb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:40Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.066437 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d1917fa-6107-4248-a5a8-a868f9db2814\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded\\\",\\\"i
mage\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:40Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.120589 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.120632 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.120640 4792 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.120656 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.120664 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:40Z","lastTransitionTime":"2025-12-02T18:36:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.224012 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.224050 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.224061 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.224079 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.224091 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:40Z","lastTransitionTime":"2025-12-02T18:36:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.326837 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.326879 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.326889 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.326907 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.326920 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:40Z","lastTransitionTime":"2025-12-02T18:36:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.429432 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.429469 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.429484 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.429499 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.429511 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:40Z","lastTransitionTime":"2025-12-02T18:36:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.533851 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.533935 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.534006 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.534032 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.534049 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:40Z","lastTransitionTime":"2025-12-02T18:36:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.539179 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:36:40 crc kubenswrapper[4792]: E1202 18:36:40.539291 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.539186 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.539322 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:36:40 crc kubenswrapper[4792]: E1202 18:36:40.539349 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:36:40 crc kubenswrapper[4792]: E1202 18:36:40.539436 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.636332 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.636378 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.636393 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.636413 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.636426 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:40Z","lastTransitionTime":"2025-12-02T18:36:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.739587 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.739640 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.739652 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.739672 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.739686 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:40Z","lastTransitionTime":"2025-12-02T18:36:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.820830 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4jhb5_2f79c130-fb71-4e1c-9e2d-ef492a0acb04/ovnkube-controller/0.log" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.824602 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" event={"ID":"2f79c130-fb71-4e1c-9e2d-ef492a0acb04","Type":"ContainerStarted","Data":"39f390819a31ac0267b87bfa0f65ba5363f17fa74b6050fc556cd4536ff883d9"} Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.824721 4792 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.842961 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.843017 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.843028 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.843047 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.843061 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:40Z","lastTransitionTime":"2025-12-02T18:36:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.843951 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d1917fa-6107-4248-a5a8-a868f9db2814\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:40Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.861816 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:40Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.878605 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63bfb707ccc4df1bf37c121b2c44c2e74bbd4c0ba5daec8a6b5e9b0d91c0f6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:40Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.892923 4792 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc533e72d865a8b2ece2e1b3ac15c56f873c2370c482ec0d1b683b343733cb06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wpdh4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:40Z is after 2025-08-24T17:21:41Z" Dec 02 
18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.924474 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04020010567f8dc6a1924f2aa5b0a2
61e83810e0639500aea5a570826c99efdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha
256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39f390819a31ac0267b87bfa0f65ba5363f17fa74b6050fc556cd4536ff883d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b57172dfec029a70b682d16da6df34bb415e50b707a80f542d2bc92c875ea18c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T18:36:39Z\\\",\\\"message\\\":\\\"1.NetworkPolicy event handler 4 for removal\\\\nI1202 18:36:39.223547 6077 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1202 18:36:39.223597 6077 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1202 18:36:39.223620 6077 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1202 18:36:39.223657 6077 factory.go:656] Stopping watch factory\\\\nI1202 18:36:39.223685 6077 handler.go:208] Removed *v1.Node event handler 7\\\\nI1202 18:36:39.223709 6077 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1202 18:36:39.223734 6077 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1202 18:36:39.223756 6077 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1202 18:36:39.223779 6077 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1202 18:36:39.223801 6077 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1202 18:36:39.223824 6077 handler.go:208] Removed *v1.Node event handler 2\\\\nI1202 18:36:39.223912 6077 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1202 18:36:39.224471 6077 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:36Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126
.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4jhb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:40Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.938054 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l7jxh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"da09a4ee-4e22-4396-a352-7bcb2b89db73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e9641526f236df1982b46cbfc0583af3bc551c9ddff5bdf2006a5d6c075307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nqc7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l7jxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:40Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.945495 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.945545 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.945557 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.945572 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.945585 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:40Z","lastTransitionTime":"2025-12-02T18:36:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.960377 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69235e1-c1ab-41e3-af2c-14b956c6c37b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddaff8d1d17ef199fd8e33d481a528de305f885915e7db4033dc0e64a94f669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11c01fc
e5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
ntrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l66ss\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:40Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.977773 4792 
status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dw25w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6925e194-2dc8-4a3a-aa76-8db41ff27997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9e65c94ca8016f1dfcde2fc1262f2f78f150ada4796f7f2607221921bb3b805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tjt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\
\"}}\" for pod \"openshift-multus\"/\"multus-dw25w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:40Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:40 crc kubenswrapper[4792]: I1202 18:36:40.991980 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qrlwg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85a6200e-64b5-4e6b-bd19-933e3b576bfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3415f5b847f72aad5d88625823e8241334b103ab38a6450f9118b46058e96b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jt4k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qrlwg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:40Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.007949 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9668059a-1772-400b-b2ad-86aa3d306dd9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2153e3d5a1b277988329aaab097b2fdb9955c4d79382b08ff93827ad69b629a1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T18:36:27Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 18:36:22.337074 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 18:36:22.338216 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179137523/tls.crt::/tmp/serving-cert-2179137523/tls.key\\\\\\\"\\\\nI1202 18:36:27.868924 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 18:36:27.874780 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 18:36:27.874832 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 18:36:27.874861 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 18:36:27.874872 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 18:36:27.886080 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1202 18:36:27.886174 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 18:36:27.886187 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886221 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 18:36:27.886250 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 18:36:27.886257 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 18:36:27.886264 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 18:36:27.889226 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:41Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.023439 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:41Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.037256 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:41Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.049217 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.049258 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.049268 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.049286 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.049299 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:41Z","lastTransitionTime":"2025-12-02T18:36:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.054766 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5777c8afe286becbd127ff2f1cc7617adff9e95a8e4061f433134bf749fa89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:41Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.087165 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8537808b344a3fb20a07a25558069d571f132b5a1004781b9efc0ed4d76de64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb24f25d6ddc35bcd113c137145f60606cbe656c7561eab446a5d8f3b9f5c8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:41Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.152305 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.152341 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.152353 4792 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.152370 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.152383 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:41Z","lastTransitionTime":"2025-12-02T18:36:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.256344 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.256409 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.256425 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.256452 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.256474 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:41Z","lastTransitionTime":"2025-12-02T18:36:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.281004 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.281091 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.281113 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.281145 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.281167 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:41Z","lastTransitionTime":"2025-12-02T18:36:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:41 crc kubenswrapper[4792]: E1202 18:36:41.304714 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1ed8f756-400f-4462-b5a1-c3a97e79306e\\\",\\\"systemUUID\\\":\\\"4d48cf4e-c99d-43e6-acd7-ad269c0425b2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:41Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.310042 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.310097 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.310116 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.310141 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.310158 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:41Z","lastTransitionTime":"2025-12-02T18:36:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:41 crc kubenswrapper[4792]: E1202 18:36:41.328264 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1ed8f756-400f-4462-b5a1-c3a97e79306e\\\",\\\"systemUUID\\\":\\\"4d48cf4e-c99d-43e6-acd7-ad269c0425b2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:41Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.333166 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.333224 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.333247 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.333275 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.333296 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:41Z","lastTransitionTime":"2025-12-02T18:36:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:41 crc kubenswrapper[4792]: E1202 18:36:41.346466 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1ed8f756-400f-4462-b5a1-c3a97e79306e\\\",\\\"systemUUID\\\":\\\"4d48cf4e-c99d-43e6-acd7-ad269c0425b2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:41Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.351242 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.351312 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.351329 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.351367 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.351395 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:41Z","lastTransitionTime":"2025-12-02T18:36:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:41 crc kubenswrapper[4792]: E1202 18:36:41.377021 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[...],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1ed8f756-400f-4462-b5a1-c3a97e79306e\\\",\\\"systemUUID\\\":\\\"4d48cf4e-c99d-43e6-acd7-ad269c0425b2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:41Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.383873 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.383933 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasNoDiskPressure" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.383957 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.383993 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.384019 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:41Z","lastTransitionTime":"2025-12-02T18:36:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:41 crc kubenswrapper[4792]: E1202 18:36:41.412500 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[...],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1ed8f756-400f-4462-b5a1-c3a97e79306e\\\",\\\"systemUUID\\\":\\\"4d48cf4e-c99d-43e6-acd7-ad269c0425b2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:41Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:41 crc kubenswrapper[4792]: E1202 18:36:41.412834 4792 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.415700 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasSufficientMemory" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.415752 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.415769 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.415798 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.415819 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:41Z","lastTransitionTime":"2025-12-02T18:36:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.518857 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.518908 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.518919 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.518935 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.518944 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:41Z","lastTransitionTime":"2025-12-02T18:36:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.622440 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.622495 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.622506 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.622539 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.622549 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:41Z","lastTransitionTime":"2025-12-02T18:36:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.727215 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.727250 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.727259 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.727275 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.727285 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:41Z","lastTransitionTime":"2025-12-02T18:36:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.829094 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.829141 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.829156 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.829176 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.829191 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:41Z","lastTransitionTime":"2025-12-02T18:36:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.830873 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4jhb5_2f79c130-fb71-4e1c-9e2d-ef492a0acb04/ovnkube-controller/1.log" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.831740 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4jhb5_2f79c130-fb71-4e1c-9e2d-ef492a0acb04/ovnkube-controller/0.log" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.835797 4792 generic.go:334] "Generic (PLEG): container finished" podID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerID="39f390819a31ac0267b87bfa0f65ba5363f17fa74b6050fc556cd4536ff883d9" exitCode=1 Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.835907 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" event={"ID":"2f79c130-fb71-4e1c-9e2d-ef492a0acb04","Type":"ContainerDied","Data":"39f390819a31ac0267b87bfa0f65ba5363f17fa74b6050fc556cd4536ff883d9"} Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.836059 4792 scope.go:117] "RemoveContainer" containerID="b57172dfec029a70b682d16da6df34bb415e50b707a80f542d2bc92c875ea18c" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.836657 4792 scope.go:117] "RemoveContainer" containerID="39f390819a31ac0267b87bfa0f65ba5363f17fa74b6050fc556cd4536ff883d9" Dec 02 18:36:41 crc kubenswrapper[4792]: E1202 18:36:41.836886 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-4jhb5_openshift-ovn-kubernetes(2f79c130-fb71-4e1c-9e2d-ef492a0acb04)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.855560 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5777c8afe286becbd127ff2f1cc7617adff9e95a8e4061f433134bf749fa89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:41Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.877710 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8537808b344a3fb20a07a25558069d571f132b5a1004781b9efc0ed4d76de64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb24f25d6ddc35bcd113c137145f60606cbe656c7561eab446a5d8f3b9f5c8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:41Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.899052 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:41Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.919953 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:41Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.932299 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.932378 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.932398 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.932445 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.932466 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:41Z","lastTransitionTime":"2025-12-02T18:36:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.939089 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63bfb707ccc4df1bf37c121b2c44c2e74bbd4c0ba5daec8a6b5e9b0d91c0f6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:41Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.956894 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc533e72d865a8b2ece2e1b3ac15c56f873c2370c482ec0d1b683b343733cb06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wpdh4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:41Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:41 crc kubenswrapper[4792]: I1202 18:36:41.982624 4792 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39f390819a31ac0267b87bfa0f65ba5363f17fa74b6050fc556cd4536ff883d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b57172dfec029a70b682d16da6df34bb415e50b707a80f542d2bc92c875ea18c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T18:36:39Z\\\",\\\"message\\\":\\\"1.NetworkPolicy event handler 4 for removal\\\\nI1202 18:36:39.223547 6077 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1202 18:36:39.223597 6077 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1202 18:36:39.223620 6077 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1202 18:36:39.223657 6077 factory.go:656] Stopping watch factory\\\\nI1202 18:36:39.223685 6077 handler.go:208] Removed *v1.Node event handler 7\\\\nI1202 18:36:39.223709 6077 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1202 18:36:39.223734 6077 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1202 18:36:39.223756 6077 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1202 18:36:39.223779 6077 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1202 18:36:39.223801 6077 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1202 18:36:39.223824 6077 handler.go:208] Removed *v1.Node event handler 2\\\\nI1202 18:36:39.223912 6077 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1202 18:36:39.224471 6077 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:36Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39f390819a31ac0267b87bfa0f65ba5363f17fa74b6050fc556cd4536ff883d9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"message\\\":\\\"d from lister for network=default: 
\\\\u0026Service{ObjectMeta:{redhat-marketplace openshift-marketplace cf6d00ec-cc2c-43f6-815c-40ffd0563e71 5558 0 2025-02-23 05:23:25 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[olm.managed:true olm.service-spec-hash:aUeLNNcZzVZO2rcaZ5Kc8V3jffO0Ss4T6qX6V5] map[] [{operators.coreos.com/v1alpha1 CatalogSource redhat-marketplace fcb55c30-a739-4bc1-9c9c-7634e05a3dbd 0xc0076112ad 0xc0076112ae}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:grpc,Protocol:TCP,Port:50051,TargetPort:{0 50051 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{olm.catalogSource: redhat-marketplace,olm.managed: true,},ClusterIP:10.217.5.140,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.140],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nF1202 18:36:40.665405 6239 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b6411
1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4jhb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:41Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.011420 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d1917fa-6107-4248-a5a8-a868f9db2814\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:42Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.034233 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69235e1-c1ab-41e3-af2c-14b956c6c37b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddaff8d1d17ef199fd8e33d481a528de305f885915e7db4033dc0e64a94f669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c52
7ecf7c3b6fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-b
inary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termin
ated\\\":{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l66ss\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:42Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.037895 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.037972 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.037990 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.038027 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.038050 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:42Z","lastTransitionTime":"2025-12-02T18:36:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.057288 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dw25w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6925e194-2dc8-4a3a-aa76-8db41ff27997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9e65c94ca8016f1dfcde2fc1262f2f78f150ada4796f7f2607221921bb3b805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tjt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dw25w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:42Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.075654 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qrlwg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85a6200e-64b5-4e6b-bd19-933e3b576bfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3415f5b847f72aad5d88625823e8241334b103ab38a6450f9118b46058e96b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jt4k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qrlwg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:42Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.094135 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l7jxh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"da09a4ee-4e22-4396-a352-7bcb2b89db73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e9641526f236df1982b46cbfc0583af3bc551c9ddff5bdf2006a5d6c075307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nqc7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l7jxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:42Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.098990 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nvcwt"] Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.099747 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nvcwt" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.102357 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.104762 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.123394 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9668059a-1772-400b-b2ad-86aa3d306dd9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://347659c5e446ceedca48f408f524f
ecace4a5273276847a6dd204a139064239f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2153e3d5a1b277988329aaab097b2fdb9955c4d79382b08ff93827ad69b629a1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T18:36:27Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 18:36:22.337074 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 18:36:22.338216 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179137523/tls.crt::/tmp/serving-cert-2179137523/tls.key\\\\\\\"\\\\nI1202 18:36:27.868924 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 18:36:27.874780 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 18:36:27.874832 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 18:36:27.874861 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 18:36:27.874872 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 18:36:27.886080 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1202 18:36:27.886174 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 18:36:27.886187 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886221 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 18:36:27.886250 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 18:36:27.886257 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 18:36:27.886264 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 18:36:27.889226 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:42Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.143143 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.143236 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.143262 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.143302 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.143327 4792 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:42Z","lastTransitionTime":"2025-12-02T18:36:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.146799 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:42Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.165049 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:42Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.192869 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5777c8afe286becbd127ff2f1cc7617adff9e95a8e4061f433134bf749fa89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:42Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.216230 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8537808b344a3fb20a07a25558069d571f132b5a1004781b9efc0ed4d76de64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb24f25d6ddc35bcd113c137145f60606cbe656c7561eab446a5d8f3b9f5c8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:42Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.240852 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d1917fa-6107-4248-a5a8-a868f9db2814\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:42Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.246920 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.246997 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.247017 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.247046 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.247067 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:42Z","lastTransitionTime":"2025-12-02T18:36:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.253272 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d2adea4f-f2e8-4311-8d3b-e720e68530eb-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-nvcwt\" (UID: \"d2adea4f-f2e8-4311-8d3b-e720e68530eb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nvcwt" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.253384 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bgfcx\" (UniqueName: \"kubernetes.io/projected/d2adea4f-f2e8-4311-8d3b-e720e68530eb-kube-api-access-bgfcx\") pod \"ovnkube-control-plane-749d76644c-nvcwt\" (UID: \"d2adea4f-f2e8-4311-8d3b-e720e68530eb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nvcwt" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.253454 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d2adea4f-f2e8-4311-8d3b-e720e68530eb-env-overrides\") pod \"ovnkube-control-plane-749d76644c-nvcwt\" (UID: \"d2adea4f-f2e8-4311-8d3b-e720e68530eb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nvcwt" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.253504 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d2adea4f-f2e8-4311-8d3b-e720e68530eb-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-nvcwt\" (UID: \"d2adea4f-f2e8-4311-8d3b-e720e68530eb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nvcwt" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.267923 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:42Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.286797 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63bfb707ccc4df1bf37c121b2c44c2e74bbd4c0ba5daec8a6b5e9b0d91c0f6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:42Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.302386 4792 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc533e72d865a8b2ece2e1b3ac15c56f873c2370c482ec0d1b683b343733cb06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wpdh4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:42Z is after 2025-08-24T17:21:41Z" Dec 02 
18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.326689 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04020010567f8dc6a1924f2aa5b0a2
61e83810e0639500aea5a570826c99efdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha
256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39f390819a31ac0267b87bfa0f65ba5363f17fa74b6050fc556cd4536ff883d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b57172dfec029a70b682d16da6df34bb415e50b707a80f542d2bc92c875ea18c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T18:36:39Z\\\",\\\"message\\\":\\\"1.NetworkPolicy event handler 4 for removal\\\\nI1202 18:36:39.223547 6077 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1202 18:36:39.223597 6077 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1202 18:36:39.223620 6077 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1202 18:36:39.223657 6077 factory.go:656] Stopping watch factory\\\\nI1202 18:36:39.223685 6077 handler.go:208] Removed *v1.Node event handler 7\\\\nI1202 18:36:39.223709 6077 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1202 18:36:39.223734 6077 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1202 18:36:39.223756 6077 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1202 18:36:39.223779 6077 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1202 18:36:39.223801 6077 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1202 18:36:39.223824 6077 handler.go:208] Removed *v1.Node event handler 2\\\\nI1202 18:36:39.223912 6077 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1202 18:36:39.224471 6077 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:36Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39f390819a31ac0267b87bfa0f65ba5363f17fa74b6050fc556cd4536ff883d9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"message\\\":\\\"d from lister for network=default: \\\\u0026Service{ObjectMeta:{redhat-marketplace openshift-marketplace cf6d00ec-cc2c-43f6-815c-40ffd0563e71 5558 0 2025-02-23 05:23:25 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[olm.managed:true olm.service-spec-hash:aUeLNNcZzVZO2rcaZ5Kc8V3jffO0Ss4T6qX6V5] map[] [{operators.coreos.com/v1alpha1 CatalogSource redhat-marketplace fcb55c30-a739-4bc1-9c9c-7634e05a3dbd 0xc0076112ad 0xc0076112ae}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:grpc,Protocol:TCP,Port:50051,TargetPort:{0 50051 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{olm.catalogSource: redhat-marketplace,olm.managed: true,},ClusterIP:10.217.5.140,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.140],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nF1202 18:36:40.665405 6239 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4jhb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:42Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.342965 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l7jxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da09a4ee-4e22-4396-a352-7bcb2b89db73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e9641526f236df1982b46cbfc0583af3bc551c9ddff5bdf2006a5d6c075307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nqc7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\
\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l7jxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:42Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.350370 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.350433 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.350451 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.350477 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.350497 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:42Z","lastTransitionTime":"2025-12-02T18:36:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.355112 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d2adea4f-f2e8-4311-8d3b-e720e68530eb-env-overrides\") pod \"ovnkube-control-plane-749d76644c-nvcwt\" (UID: \"d2adea4f-f2e8-4311-8d3b-e720e68530eb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nvcwt" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.355184 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d2adea4f-f2e8-4311-8d3b-e720e68530eb-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-nvcwt\" (UID: \"d2adea4f-f2e8-4311-8d3b-e720e68530eb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nvcwt" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.355238 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d2adea4f-f2e8-4311-8d3b-e720e68530eb-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-nvcwt\" (UID: \"d2adea4f-f2e8-4311-8d3b-e720e68530eb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nvcwt" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.355288 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bgfcx\" (UniqueName: \"kubernetes.io/projected/d2adea4f-f2e8-4311-8d3b-e720e68530eb-kube-api-access-bgfcx\") pod \"ovnkube-control-plane-749d76644c-nvcwt\" (UID: \"d2adea4f-f2e8-4311-8d3b-e720e68530eb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nvcwt" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.356221 4792 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d2adea4f-f2e8-4311-8d3b-e720e68530eb-env-overrides\") pod \"ovnkube-control-plane-749d76644c-nvcwt\" (UID: \"d2adea4f-f2e8-4311-8d3b-e720e68530eb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nvcwt" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.356834 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d2adea4f-f2e8-4311-8d3b-e720e68530eb-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-nvcwt\" (UID: \"d2adea4f-f2e8-4311-8d3b-e720e68530eb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nvcwt" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.368770 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d2adea4f-f2e8-4311-8d3b-e720e68530eb-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-nvcwt\" (UID: \"d2adea4f-f2e8-4311-8d3b-e720e68530eb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nvcwt" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.372271 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69235e1-c1ab-41e3-af2c-14b956c6c37b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddaff8d1d17ef199fd8e33d481a528de305f885915e7db4033dc0e64a94f669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:36Z\\\"
,\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l66ss\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:42Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.378629 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bgfcx\" (UniqueName: \"kubernetes.io/projected/d2adea4f-f2e8-4311-8d3b-e720e68530eb-kube-api-access-bgfcx\") pod \"ovnkube-control-plane-749d76644c-nvcwt\" (UID: \"d2adea4f-f2e8-4311-8d3b-e720e68530eb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nvcwt" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.392304 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dw25w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6925e194-2dc8-4a3a-aa76-8db41ff27997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9e65c94ca8016f1dfcde2fc1262f2f78f150ada4796f7f2607221921bb3b805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/hos
t/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tjt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dw25w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:42Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.405764 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qrlwg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"85a6200e-64b5-4e6b-bd19-933e3b576bfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3415f5b847f72aad5d88625823e8241334b103ab38a6450f9118b46058e96b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jt4k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qrlwg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:42Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.419546 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nvcwt" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.420600 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9668059a-1772-400b-b2ad-86aa3d306dd9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube
-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2153e3d5a1b277988329aaab097b2fdb9955c4d79382b08ff93827ad69b629a1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T18:36:27Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 18:36:22.337074 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 18:36:22.338216 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179137523/tls.crt::/tmp/serving-cert-2179137523/tls.key\\\\\\\"\\\\nI1202 18:36:27.868924 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 18:36:27.874780 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 18:36:27.874832 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 18:36:27.874861 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 18:36:27.874872 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 18:36:27.886080 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1202 18:36:27.886174 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 18:36:27.886187 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886221 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 18:36:27.886250 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 18:36:27.886257 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 18:36:27.886264 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 18:36:27.889226 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:42Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.436606 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:42Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.451070 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nvcwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2adea4f-f2e8-4311-8d3b-e720e68530eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgfcx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgfcx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nvcwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:42Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.453508 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.453586 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.453606 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.453635 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.453668 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:42Z","lastTransitionTime":"2025-12-02T18:36:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.539343 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.539424 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:36:42 crc kubenswrapper[4792]: E1202 18:36:42.539502 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:36:42 crc kubenswrapper[4792]: E1202 18:36:42.539606 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.539686 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:36:42 crc kubenswrapper[4792]: E1202 18:36:42.539744 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.557065 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.557107 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.557123 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.557342 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.557360 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:42Z","lastTransitionTime":"2025-12-02T18:36:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.660795 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.660858 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.660871 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.660897 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.660913 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:42Z","lastTransitionTime":"2025-12-02T18:36:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.769680 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.769771 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.769793 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.769826 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.769852 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:42Z","lastTransitionTime":"2025-12-02T18:36:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.846956 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4jhb5_2f79c130-fb71-4e1c-9e2d-ef492a0acb04/ovnkube-controller/1.log" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.855750 4792 scope.go:117] "RemoveContainer" containerID="39f390819a31ac0267b87bfa0f65ba5363f17fa74b6050fc556cd4536ff883d9" Dec 02 18:36:42 crc kubenswrapper[4792]: E1202 18:36:42.856155 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-4jhb5_openshift-ovn-kubernetes(2f79c130-fb71-4e1c-9e2d-ef492a0acb04)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.857382 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nvcwt" event={"ID":"d2adea4f-f2e8-4311-8d3b-e720e68530eb","Type":"ContainerStarted","Data":"27674ce2a994b6b1f07d51d85fb1c2a50ff21ece4c9ef26cddbce8c76dfcf14e"} Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.873446 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.873496 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.873506 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.873543 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.873554 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:42Z","lastTransitionTime":"2025-12-02T18:36:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.874250 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:42Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.898852 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5777c8afe286becbd127ff2f1cc7617adff9e95a8e4061f433134bf749fa89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:42Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.918292 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8537808b344a3fb20a07a25558069d571f132b5a1004781b9efc0ed4d76de64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb24f25d6ddc35bcd113c137145f60606cbe656c7561eab446a5d8f3b9f5c8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:42Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.940698 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc533e72d865a8b2ece2e1b3ac15c56f873c2370c482ec0d1b683b343733cb06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wpdh4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:42Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.976769 4792 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.977100 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.977482 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.977744 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.977902 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:42Z","lastTransitionTime":"2025-12-02T18:36:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.976610 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39f390819a31ac0267b87bfa0f65ba5363f17fa7
4b6050fc556cd4536ff883d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39f390819a31ac0267b87bfa0f65ba5363f17fa74b6050fc556cd4536ff883d9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"message\\\":\\\"d from lister for network=default: \\\\u0026Service{ObjectMeta:{redhat-marketplace openshift-marketplace cf6d00ec-cc2c-43f6-815c-40ffd0563e71 5558 0 2025-02-23 05:23:25 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[olm.managed:true olm.service-spec-hash:aUeLNNcZzVZO2rcaZ5Kc8V3jffO0Ss4T6qX6V5] map[] [{operators.coreos.com/v1alpha1 CatalogSource redhat-marketplace fcb55c30-a739-4bc1-9c9c-7634e05a3dbd 0xc0076112ad 0xc0076112ae}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:grpc,Protocol:TCP,Port:50051,TargetPort:{0 50051 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{olm.catalogSource: redhat-marketplace,olm.managed: true,},ClusterIP:10.217.5.140,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.140],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nF1202 18:36:40.665405 6239 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-4jhb5_openshift-ovn-kubernetes(2f79c130-fb71-4e1c-9e2d-ef492a0acb04)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4jhb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:42Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:42 crc kubenswrapper[4792]: I1202 18:36:42.998614 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d1917fa-6107-4248-a5a8-a868f9db2814\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:42Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.015746 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:43Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.030322 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63bfb707ccc4df1bf37c121b2c44c2e74bbd4c0ba5daec8a6b5e9b0d91c0f6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:43Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.041480 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qrlwg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"85a6200e-64b5-4e6b-bd19-933e3b576bfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3415f5b847f72aad5d88625823e8241334b103ab38a6450f9118b46058e96b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jt4k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qrlwg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:43Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.052952 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l7jxh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"da09a4ee-4e22-4396-a352-7bcb2b89db73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e9641526f236df1982b46cbfc0583af3bc551c9ddff5bdf2006a5d6c075307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nqc7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l7jxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:43Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.067815 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69235e1-c1ab-41e3-af2c-14b956c6c37b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddaff8d1d17ef199fd8e33d481a528de305f885915e7db4033dc0e64a94f669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l66ss\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:43Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.080092 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dw25w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6925e194-2dc8-4a3a-aa76-8db41ff27997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9e65c94ca8016f1dfcde2fc1262f2f78f150ada4796f7f2607221921bb3b805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tjt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dw25w\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:43Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.080680 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.080856 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.081021 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.081200 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.081483 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:43Z","lastTransitionTime":"2025-12-02T18:36:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.096280 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9668059a-1772-400b-b2ad-86aa3d306dd9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2153e3d5a1b277988329aaab097b2fdb9955c4d79382b08ff93827ad69b629a1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T18:36:27Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 18:36:22.337074 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 18:36:22.338216 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179137523/tls.crt::/tmp/serving-cert-2179137523/tls.key\\\\\\\"\\\\nI1202 18:36:27.868924 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 18:36:27.874780 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 18:36:27.874832 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 18:36:27.874861 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 18:36:27.874872 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 18:36:27.886080 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1202 18:36:27.886174 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 18:36:27.886187 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886221 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 18:36:27.886250 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 18:36:27.886257 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 18:36:27.886264 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 18:36:27.889226 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:43Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.109796 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:43Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.123049 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nvcwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2adea4f-f2e8-4311-8d3b-e720e68530eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgfcx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgfcx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nvcwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:43Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.185566 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.185629 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.185647 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.185671 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.185691 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:43Z","lastTransitionTime":"2025-12-02T18:36:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.289379 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.289452 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.289471 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.289497 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.289515 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:43Z","lastTransitionTime":"2025-12-02T18:36:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.393891 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.393966 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.394330 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.394383 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.394396 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:43Z","lastTransitionTime":"2025-12-02T18:36:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.497049 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.497096 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.497108 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.497129 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.497139 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:43Z","lastTransitionTime":"2025-12-02T18:36:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.600392 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.600437 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.600446 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.600461 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.600474 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:43Z","lastTransitionTime":"2025-12-02T18:36:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.632004 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-2ls4m"] Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.632894 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:36:43 crc kubenswrapper[4792]: E1202 18:36:43.633027 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.647712 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:43Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.665179 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5777c8afe286becbd127ff2f1cc7617adff9e95a8e4061f433134bf749fa89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:43Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.670021 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:36:43 crc kubenswrapper[4792]: E1202 18:36:43.670201 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:36:59.670178205 +0000 UTC m=+50.443070543 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.686977 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8537808b344a3fb20a07a25558069d571f132b5a1004781b9efc0ed4d76de64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb24f25d6ddc35bcd113c137145f60606cbe656c7561eab446a5d8f3b9f5c8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\
\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:43Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.704117 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.704180 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.704200 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.704226 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.704257 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:43Z","lastTransitionTime":"2025-12-02T18:36:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.713350 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc533e72d865a8b2ece2e1b3ac15c56f873c2370c482ec0d1b683b343733cb06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wpdh4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:43Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.733453 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39f390819a31ac0267b87bfa0f65ba5363f17fa74b6050fc556cd4536ff883d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39f390819a31ac0267b87bfa0f65ba5363f17fa74b6050fc556cd4536ff883d9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"message\\\":\\\"d from lister for network=default: \\\\u0026Service{ObjectMeta:{redhat-marketplace openshift-marketplace cf6d00ec-cc2c-43f6-815c-40ffd0563e71 5558 0 2025-02-23 05:23:25 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[olm.managed:true olm.service-spec-hash:aUeLNNcZzVZO2rcaZ5Kc8V3jffO0Ss4T6qX6V5] map[] [{operators.coreos.com/v1alpha1 CatalogSource redhat-marketplace fcb55c30-a739-4bc1-9c9c-7634e05a3dbd 0xc0076112ad 0xc0076112ae}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:grpc,Protocol:TCP,Port:50051,TargetPort:{0 50051 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{olm.catalogSource: redhat-marketplace,olm.managed: true,},ClusterIP:10.217.5.140,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.140],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nF1202 18:36:40.665405 6239 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-4jhb5_openshift-ovn-kubernetes(2f79c130-fb71-4e1c-9e2d-ef492a0acb04)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4jhb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:43Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.745154 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d1917fa-6107-4248-a5a8-a868f9db2814\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:43Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.758308 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:43Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.771467 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/778806a7-7e6f-4776-8233-b42b296ebc52-metrics-certs\") pod \"network-metrics-daemon-2ls4m\" (UID: \"778806a7-7e6f-4776-8233-b42b296ebc52\") " pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.771509 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.771558 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.771582 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.771600 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vcjpr\" (UniqueName: \"kubernetes.io/projected/778806a7-7e6f-4776-8233-b42b296ebc52-kube-api-access-vcjpr\") pod \"network-metrics-daemon-2ls4m\" (UID: \"778806a7-7e6f-4776-8233-b42b296ebc52\") " pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.771620 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: 
\"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:36:43 crc kubenswrapper[4792]: E1202 18:36:43.771637 4792 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 18:36:43 crc kubenswrapper[4792]: E1202 18:36:43.771728 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 18:36:59.771686365 +0000 UTC m=+50.544578703 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 18:36:43 crc kubenswrapper[4792]: E1202 18:36:43.771738 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 18:36:43 crc kubenswrapper[4792]: E1202 18:36:43.771757 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 18:36:43 crc kubenswrapper[4792]: E1202 18:36:43.771767 4792 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 18:36:43 crc kubenswrapper[4792]: E1202 18:36:43.771802 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-02 18:36:59.771790788 +0000 UTC m=+50.544683106 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 18:36:43 crc kubenswrapper[4792]: E1202 18:36:43.771730 4792 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 18:36:43 crc kubenswrapper[4792]: E1202 18:36:43.771838 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 18:36:59.771833309 +0000 UTC m=+50.544725637 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 18:36:43 crc kubenswrapper[4792]: E1202 18:36:43.771848 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 18:36:43 crc kubenswrapper[4792]: E1202 18:36:43.771881 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 18:36:43 crc kubenswrapper[4792]: E1202 18:36:43.771897 4792 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 18:36:43 crc kubenswrapper[4792]: E1202 18:36:43.771965 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-02 18:36:59.771941072 +0000 UTC m=+50.544833470 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.772179 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63bfb707ccc4df1bf37c121b2c44c2e74bbd4c0ba5daec8a6b5e9b0d91c0f6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:43Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.786394 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qrlwg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"85a6200e-64b5-4e6b-bd19-933e3b576bfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3415f5b847f72aad5d88625823e8241334b103ab38a6450f9118b46058e96b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jt4k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qrlwg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:43Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.799597 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2ls4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"778806a7-7e6f-4776-8233-b42b296ebc52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcjpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcjpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2ls4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:43Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.811005 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l7jxh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"da09a4ee-4e22-4396-a352-7bcb2b89db73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e9641526f236df1982b46cbfc0583af3bc551c9ddff5bdf2006a5d6c075307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nqc7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l7jxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:43Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.811889 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.811920 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.811932 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.811951 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.811963 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:43Z","lastTransitionTime":"2025-12-02T18:36:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.829061 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69235e1-c1ab-41e3-af2c-14b956c6c37b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddaff8d1d17ef199fd8e33d481a528de305f885915e7db4033dc0e64a94f669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11c01fc
e5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
ntrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l66ss\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:43Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.843803 4792 
status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dw25w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6925e194-2dc8-4a3a-aa76-8db41ff27997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9e65c94ca8016f1dfcde2fc1262f2f78f150ada4796f7f2607221921bb3b805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tjt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\
\"}}\" for pod \"openshift-multus\"/\"multus-dw25w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:43Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.858273 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9668059a-1772-400b-b2ad-86aa3d306dd9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@
sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2153e3d5a1b277988329aaab097b2fdb9955c4d79382b08ff93827ad69b629a1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T18:36:27Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 18:36:22.337074 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 18:36:22.338216 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179137523/tls.crt::/tmp/serving-cert-2179137523/tls.key\\\\\\\"\\\\nI1202 18:36:27.868924 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 18:36:27.874780 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 18:36:27.874832 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 18:36:27.874861 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 18:36:27.874872 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 18:36:27.886080 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1202 18:36:27.886174 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 18:36:27.886187 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886221 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 18:36:27.886250 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 18:36:27.886257 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 18:36:27.886264 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 18:36:27.889226 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:43Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.861336 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nvcwt" event={"ID":"d2adea4f-f2e8-4311-8d3b-e720e68530eb","Type":"ContainerStarted","Data":"93a7fa91b532301637883d482e698574ab677db991d76698cffefb28f4e8b9b8"} Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.861377 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nvcwt" event={"ID":"d2adea4f-f2e8-4311-8d3b-e720e68530eb","Type":"ContainerStarted","Data":"f39043dbb211b12cfe20e988fa0b943cefec51e7d2927e94d1cbb9b56c999038"} Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.871838 4792 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:43Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.871978 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/778806a7-7e6f-4776-8233-b42b296ebc52-metrics-certs\") pod \"network-metrics-daemon-2ls4m\" (UID: \"778806a7-7e6f-4776-8233-b42b296ebc52\") " pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.872044 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vcjpr\" (UniqueName: \"kubernetes.io/projected/778806a7-7e6f-4776-8233-b42b296ebc52-kube-api-access-vcjpr\") pod \"network-metrics-daemon-2ls4m\" (UID: \"778806a7-7e6f-4776-8233-b42b296ebc52\") " pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:36:43 crc kubenswrapper[4792]: E1202 18:36:43.872137 4792 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 18:36:43 crc kubenswrapper[4792]: E1202 18:36:43.872200 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/778806a7-7e6f-4776-8233-b42b296ebc52-metrics-certs podName:778806a7-7e6f-4776-8233-b42b296ebc52 nodeName:}" failed. 
No retries permitted until 2025-12-02 18:36:44.372182647 +0000 UTC m=+35.145074975 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/778806a7-7e6f-4776-8233-b42b296ebc52-metrics-certs") pod "network-metrics-daemon-2ls4m" (UID: "778806a7-7e6f-4776-8233-b42b296ebc52") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.887795 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nvcwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2adea4f-f2e8-4311-8d3b-e720e68530eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgfcx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgfcx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\
\"2025-12-02T18:36:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nvcwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:43Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.894231 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vcjpr\" (UniqueName: \"kubernetes.io/projected/778806a7-7e6f-4776-8233-b42b296ebc52-kube-api-access-vcjpr\") pod \"network-metrics-daemon-2ls4m\" (UID: \"778806a7-7e6f-4776-8233-b42b296ebc52\") " pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.898372 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l7jxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da09a4ee-4e22-4396-a352-7bcb2b89db73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e9641526f236df1982b46cbfc0583af3bc551c9ddff5bdf2006a5d6c075307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nqc7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l7jxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:43Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.914408 4792 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.914442 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.914451 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.914468 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.914478 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:43Z","lastTransitionTime":"2025-12-02T18:36:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.915000 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69235e1-c1ab-41e3-af2c-14b956c6c37b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddaff8d1d17ef199fd8e33d481a528de305f885915e7db4033dc0e64a94f669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e4079209
6b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\
"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12
-02T18:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l66ss\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:43Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.928149 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dw25w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6925e194-2dc8-4a3a-aa76-8db41ff27997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9e65c94ca8016f1dfcde2fc1262f2f78f150ada4796f7f2607221921bb3b805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"
mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tjt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dw25w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:43Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.941326 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qrlwg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85a6200e-64b5-4e6b-bd19-933e3b576bfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3415f5b847f72aad5d88625823e8241334b103ab38a6450f9118b46058e96b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jt4k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.16
8.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qrlwg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:43Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.955777 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2ls4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"778806a7-7e6f-4776-8233-b42b296ebc52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcjpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcjpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2ls4m\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:43Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.972270 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:43Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:43 crc kubenswrapper[4792]: I1202 18:36:43.986221 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nvcwt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2adea4f-f2e8-4311-8d3b-e720e68530eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f39043dbb211b12cfe20e988fa0b943cefec51e7d2927e94d1cbb9b56c999038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgfcx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93a7fa91b532301637883d482e698574ab677db991d76698cffefb28f4e8b9b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgfcx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nvcwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:43Z is after 2025-08-24T17:21:41Z" Dec 02 
18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.002357 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9668059a-1772-400b-b2ad-86aa3d306dd9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\
":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2153e3d5a1b277988329aaab097b2fdb9955c4d79382b08ff93827ad69b629a1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T18:36:27Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 18:36:22.337074 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 18:36:22.338216 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179137523/tls.crt::/tmp/serving-cert-2179137523/tls.key\\\\\\\"\\\\nI1202 18:36:27.868924 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 18:36:27.874780 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 18:36:27.874832 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 18:36:27.874861 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 18:36:27.874872 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 18:36:27.886080 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1202 18:36:27.886174 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 18:36:27.886187 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886221 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 18:36:27.886250 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 18:36:27.886257 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 18:36:27.886264 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 18:36:27.889226 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:44Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.018155 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.018216 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.018238 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.018266 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.018288 4792 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:44Z","lastTransitionTime":"2025-12-02T18:36:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.018704 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:44Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.036663 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5777c8afe286becbd127ff2f1cc7617adff9e95a8e4061f433134bf749fa89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:44Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.052132 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8537808b344a3fb20a07a25558069d571f132b5a1004781b9efc0ed4d76de64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb24f25d6ddc35bcd113c137145f60606cbe656c7561eab446a5d8f3b9f5c8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:44Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.067971 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:44Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.087489 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63bfb707ccc4df1bf37c121b2c44c2e74bbd4c0ba5daec8a6b5e9b0d91c0f6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:44Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.105213 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc533e72d865a8b2ece2e1b3ac15c56f873c2370c482ec0d1b683b343733cb06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wpdh4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:44Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.121068 4792 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.121121 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.121139 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.121162 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.121179 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:44Z","lastTransitionTime":"2025-12-02T18:36:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.127129 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39f390819a31ac0267b87bfa0f65ba5363f17fa7
4b6050fc556cd4536ff883d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39f390819a31ac0267b87bfa0f65ba5363f17fa74b6050fc556cd4536ff883d9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"message\\\":\\\"d from lister for network=default: \\\\u0026Service{ObjectMeta:{redhat-marketplace openshift-marketplace cf6d00ec-cc2c-43f6-815c-40ffd0563e71 5558 0 2025-02-23 05:23:25 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[olm.managed:true olm.service-spec-hash:aUeLNNcZzVZO2rcaZ5Kc8V3jffO0Ss4T6qX6V5] map[] [{operators.coreos.com/v1alpha1 CatalogSource redhat-marketplace fcb55c30-a739-4bc1-9c9c-7634e05a3dbd 0xc0076112ad 0xc0076112ae}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:grpc,Protocol:TCP,Port:50051,TargetPort:{0 50051 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{olm.catalogSource: redhat-marketplace,olm.managed: true,},ClusterIP:10.217.5.140,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.140],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nF1202 18:36:40.665405 6239 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-4jhb5_openshift-ovn-kubernetes(2f79c130-fb71-4e1c-9e2d-ef492a0acb04)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4jhb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:44Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.143068 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d1917fa-6107-4248-a5a8-a868f9db2814\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:44Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.224469 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.224569 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.224589 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.224615 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.224633 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:44Z","lastTransitionTime":"2025-12-02T18:36:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.327502 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.327579 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.327588 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.327605 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.327618 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:44Z","lastTransitionTime":"2025-12-02T18:36:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.377644 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/778806a7-7e6f-4776-8233-b42b296ebc52-metrics-certs\") pod \"network-metrics-daemon-2ls4m\" (UID: \"778806a7-7e6f-4776-8233-b42b296ebc52\") " pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:36:44 crc kubenswrapper[4792]: E1202 18:36:44.377817 4792 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 18:36:44 crc kubenswrapper[4792]: E1202 18:36:44.377899 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/778806a7-7e6f-4776-8233-b42b296ebc52-metrics-certs podName:778806a7-7e6f-4776-8233-b42b296ebc52 nodeName:}" failed. No retries permitted until 2025-12-02 18:36:45.377876038 +0000 UTC m=+36.150768366 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/778806a7-7e6f-4776-8233-b42b296ebc52-metrics-certs") pod "network-metrics-daemon-2ls4m" (UID: "778806a7-7e6f-4776-8233-b42b296ebc52") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.430320 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.430373 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.430386 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.430406 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.430418 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:44Z","lastTransitionTime":"2025-12-02T18:36:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.533908 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.533985 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.534000 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.534025 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.534040 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:44Z","lastTransitionTime":"2025-12-02T18:36:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.539234 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.539375 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:36:44 crc kubenswrapper[4792]: E1202 18:36:44.539677 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.539741 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:36:44 crc kubenswrapper[4792]: E1202 18:36:44.539817 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:36:44 crc kubenswrapper[4792]: E1202 18:36:44.539984 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.636981 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.637139 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.637158 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.637183 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.637204 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:44Z","lastTransitionTime":"2025-12-02T18:36:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.740260 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.740742 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.740940 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.741106 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.741349 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:44Z","lastTransitionTime":"2025-12-02T18:36:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.849841 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.849916 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.849937 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.849970 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.849997 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:44Z","lastTransitionTime":"2025-12-02T18:36:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.953314 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.953391 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.953410 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.953441 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:44 crc kubenswrapper[4792]: I1202 18:36:44.953466 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:44Z","lastTransitionTime":"2025-12-02T18:36:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.056860 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.057183 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.057311 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.057434 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.057606 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:45Z","lastTransitionTime":"2025-12-02T18:36:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.161198 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.161278 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.161299 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.161330 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.161353 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:45Z","lastTransitionTime":"2025-12-02T18:36:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.265503 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.265862 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.265993 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.266192 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.266472 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:45Z","lastTransitionTime":"2025-12-02T18:36:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.370485 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.370581 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.370601 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.370631 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.370653 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:45Z","lastTransitionTime":"2025-12-02T18:36:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.389552 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/778806a7-7e6f-4776-8233-b42b296ebc52-metrics-certs\") pod \"network-metrics-daemon-2ls4m\" (UID: \"778806a7-7e6f-4776-8233-b42b296ebc52\") " pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:36:45 crc kubenswrapper[4792]: E1202 18:36:45.389806 4792 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 18:36:45 crc kubenswrapper[4792]: E1202 18:36:45.389919 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/778806a7-7e6f-4776-8233-b42b296ebc52-metrics-certs podName:778806a7-7e6f-4776-8233-b42b296ebc52 nodeName:}" failed. No retries permitted until 2025-12-02 18:36:47.389886416 +0000 UTC m=+38.162778784 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/778806a7-7e6f-4776-8233-b42b296ebc52-metrics-certs") pod "network-metrics-daemon-2ls4m" (UID: "778806a7-7e6f-4776-8233-b42b296ebc52") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.473778 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.473846 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.473865 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.473900 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.473922 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:45Z","lastTransitionTime":"2025-12-02T18:36:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.539648 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:36:45 crc kubenswrapper[4792]: E1202 18:36:45.539928 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52" Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.577603 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.577691 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.577705 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.577726 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.577740 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:45Z","lastTransitionTime":"2025-12-02T18:36:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.681992 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.682074 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.682091 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.682118 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.682138 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:45Z","lastTransitionTime":"2025-12-02T18:36:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.784818 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.784887 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.784904 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.784932 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.784949 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:45Z","lastTransitionTime":"2025-12-02T18:36:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.887115 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.887172 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.887190 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.887214 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.887232 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:45Z","lastTransitionTime":"2025-12-02T18:36:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.989713 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.989793 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.989812 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.989837 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:45 crc kubenswrapper[4792]: I1202 18:36:45.989855 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:45Z","lastTransitionTime":"2025-12-02T18:36:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.093461 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.093512 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.093547 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.093567 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.093578 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:46Z","lastTransitionTime":"2025-12-02T18:36:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.197332 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.197398 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.197416 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.197437 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.197455 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:46Z","lastTransitionTime":"2025-12-02T18:36:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.300885 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.300966 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.300987 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.301019 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.301042 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:46Z","lastTransitionTime":"2025-12-02T18:36:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.404514 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.404613 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.404629 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.404658 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.404678 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:46Z","lastTransitionTime":"2025-12-02T18:36:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.508932 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.508996 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.509009 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.509034 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.509049 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:46Z","lastTransitionTime":"2025-12-02T18:36:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.538941 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.538983 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.539475 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:36:46 crc kubenswrapper[4792]: E1202 18:36:46.539761 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:36:46 crc kubenswrapper[4792]: E1202 18:36:46.540102 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:36:46 crc kubenswrapper[4792]: E1202 18:36:46.540315 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.611857 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.611929 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.611947 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.611975 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.611994 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:46Z","lastTransitionTime":"2025-12-02T18:36:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.715712 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.715787 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.715809 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.715843 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.715865 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:46Z","lastTransitionTime":"2025-12-02T18:36:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.819167 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.819214 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.819224 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.819241 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.819253 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:46Z","lastTransitionTime":"2025-12-02T18:36:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.921879 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.921954 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.921973 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.922056 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:46 crc kubenswrapper[4792]: I1202 18:36:46.922088 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:46Z","lastTransitionTime":"2025-12-02T18:36:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.025198 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.025273 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.025292 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.025326 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.025348 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:47Z","lastTransitionTime":"2025-12-02T18:36:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.128976 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.129050 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.129066 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.129090 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.129107 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:47Z","lastTransitionTime":"2025-12-02T18:36:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.232547 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.232613 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.232632 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.232656 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.232674 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:47Z","lastTransitionTime":"2025-12-02T18:36:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.336398 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.336478 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.336505 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.336572 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.336595 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:47Z","lastTransitionTime":"2025-12-02T18:36:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.414665 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/778806a7-7e6f-4776-8233-b42b296ebc52-metrics-certs\") pod \"network-metrics-daemon-2ls4m\" (UID: \"778806a7-7e6f-4776-8233-b42b296ebc52\") " pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:36:47 crc kubenswrapper[4792]: E1202 18:36:47.414988 4792 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 18:36:47 crc kubenswrapper[4792]: E1202 18:36:47.415136 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/778806a7-7e6f-4776-8233-b42b296ebc52-metrics-certs podName:778806a7-7e6f-4776-8233-b42b296ebc52 nodeName:}" failed. No retries permitted until 2025-12-02 18:36:51.415102423 +0000 UTC m=+42.187994861 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/778806a7-7e6f-4776-8233-b42b296ebc52-metrics-certs") pod "network-metrics-daemon-2ls4m" (UID: "778806a7-7e6f-4776-8233-b42b296ebc52") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.439967 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.440014 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.440030 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.440055 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.440071 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:47Z","lastTransitionTime":"2025-12-02T18:36:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.539068 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:36:47 crc kubenswrapper[4792]: E1202 18:36:47.539319 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52" Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.543985 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.544065 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.544095 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.544131 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.544157 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:47Z","lastTransitionTime":"2025-12-02T18:36:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.647832 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.647902 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.647923 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.647952 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.647972 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:47Z","lastTransitionTime":"2025-12-02T18:36:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.751089 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.751162 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.751181 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.751208 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.751228 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:47Z","lastTransitionTime":"2025-12-02T18:36:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.853975 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.854044 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.854063 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.854088 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.854109 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:47Z","lastTransitionTime":"2025-12-02T18:36:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.937019 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.953786 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2ls4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"778806a7-7e6f-4776-8233-b42b296ebc52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcjpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcjpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2ls4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:47Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.956439 
4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.956702 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.956887 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.957264 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.957601 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:47Z","lastTransitionTime":"2025-12-02T18:36:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.969764 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l7jxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da09a4ee-4e22-4396-a352-7bcb2b89db73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e9641526f236df1982b46cbfc0583af3bc551c9ddff5bdf2006a5d6c075307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nqc7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l7jxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:47Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:47 crc kubenswrapper[4792]: I1202 18:36:47.987160 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69235e1-c1ab-41e3-af2c-14b956c6c37b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddaff8d1d17ef199fd8e33d481a528de305f885915e7db4033dc0e64a94f669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-
api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:34Z\\\"
,\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l66ss\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-02T18:36:47Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.005940 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dw25w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6925e194-2dc8-4a3a-aa76-8db41ff27997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9e65c94ca8016f1dfcde2fc1262f2f78f150ada4796f7f2607221921bb3b805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tjt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":
\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dw25w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:48Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.020761 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qrlwg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85a6200e-64b5-4e6b-bd19-933e3b576bfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3415f5b847f72aad5d88625823e8241334b103ab38a6450f9118b46058e96b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jt4k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qrlwg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:48Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.042222 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9668059a-1772-400b-b2ad-86aa3d306dd9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2153e3d5a1b277988329aaab097b2fdb9955c4d79382b08ff93827ad69b629a1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T18:36:27Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 18:36:22.337074 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 18:36:22.338216 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179137523/tls.crt::/tmp/serving-cert-2179137523/tls.key\\\\\\\"\\\\nI1202 18:36:27.868924 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 18:36:27.874780 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 18:36:27.874832 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 18:36:27.874861 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 18:36:27.874872 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 18:36:27.886080 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1202 18:36:27.886174 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 18:36:27.886187 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886221 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 18:36:27.886250 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 18:36:27.886257 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 18:36:27.886264 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 18:36:27.889226 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:48Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.061820 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.062046 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.062557 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.062630 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:48Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.062730 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.063030 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:48Z","lastTransitionTime":"2025-12-02T18:36:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.081846 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nvcwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2adea4f-f2e8-4311-8d3b-e720e68530eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f39043dbb211b12cfe20e988fa0b943cefec51e7d2927e94d1cbb9b56c999038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgfcx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93a7fa91b532301637883d482e698574ab677db991d76698cffefb28f4e8b9b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgfcx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nvcwt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:48Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.104251 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:48Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.122709 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5777c8afe286becbd127ff2f1cc7617adff9e95a8e4061f433134bf749fa89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:48Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.145467 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8537808b344a3fb20a07a25558069d571f132b5a1004781b9efc0ed4d76de64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb24f25d6ddc35bcd113c137145f60606cbe656c7561eab446a5d8f3b9f5c8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:48Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.168085 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.168241 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.168271 4792 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.168345 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.168371 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:48Z","lastTransitionTime":"2025-12-02T18:36:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.177197 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39f390819a31ac0267b87bfa0f65ba5363f17fa7
4b6050fc556cd4536ff883d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39f390819a31ac0267b87bfa0f65ba5363f17fa74b6050fc556cd4536ff883d9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"message\\\":\\\"d from lister for network=default: \\\\u0026Service{ObjectMeta:{redhat-marketplace openshift-marketplace cf6d00ec-cc2c-43f6-815c-40ffd0563e71 5558 0 2025-02-23 05:23:25 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[olm.managed:true olm.service-spec-hash:aUeLNNcZzVZO2rcaZ5Kc8V3jffO0Ss4T6qX6V5] map[] [{operators.coreos.com/v1alpha1 CatalogSource redhat-marketplace fcb55c30-a739-4bc1-9c9c-7634e05a3dbd 0xc0076112ad 0xc0076112ae}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:grpc,Protocol:TCP,Port:50051,TargetPort:{0 50051 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{olm.catalogSource: redhat-marketplace,olm.managed: true,},ClusterIP:10.217.5.140,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.140],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nF1202 18:36:40.665405 6239 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-4jhb5_openshift-ovn-kubernetes(2f79c130-fb71-4e1c-9e2d-ef492a0acb04)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4jhb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:48Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.197159 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d1917fa-6107-4248-a5a8-a868f9db2814\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:48Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.211592 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:48Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.226813 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63bfb707ccc4df1bf37c121b2c44c2e74bbd4c0ba5daec8a6b5e9b0d91c0f6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:48Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.243074 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc533e72d865a8b2ece2e1b3ac15c56f873c2370c482ec0d1b683b343733cb06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wpdh4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:48Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.272171 4792 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.272227 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.272245 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.272270 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.272289 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:48Z","lastTransitionTime":"2025-12-02T18:36:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.375818 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.375861 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.375869 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.375901 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.375912 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:48Z","lastTransitionTime":"2025-12-02T18:36:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.479275 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.479356 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.479383 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.479418 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.479445 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:48Z","lastTransitionTime":"2025-12-02T18:36:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.538716 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.538860 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:36:48 crc kubenswrapper[4792]: E1202 18:36:48.539091 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.539342 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:36:48 crc kubenswrapper[4792]: E1202 18:36:48.539513 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:36:48 crc kubenswrapper[4792]: E1202 18:36:48.539942 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.582665 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.582728 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.582780 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.582806 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.582824 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:48Z","lastTransitionTime":"2025-12-02T18:36:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.685358 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.685407 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.685422 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.685441 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.685456 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:48Z","lastTransitionTime":"2025-12-02T18:36:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.789085 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.789124 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.789136 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.789153 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.789165 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:48Z","lastTransitionTime":"2025-12-02T18:36:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.893678 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.893761 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.893788 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.893829 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.893858 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:48Z","lastTransitionTime":"2025-12-02T18:36:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.997394 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.997436 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.997452 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.997471 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:48 crc kubenswrapper[4792]: I1202 18:36:48.997486 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:48Z","lastTransitionTime":"2025-12-02T18:36:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.100476 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.101130 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.101302 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.101426 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.101584 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:49Z","lastTransitionTime":"2025-12-02T18:36:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.205736 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.205808 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.205826 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.205853 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.205870 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:49Z","lastTransitionTime":"2025-12-02T18:36:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.308587 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.308676 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.308695 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.308727 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.308750 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:49Z","lastTransitionTime":"2025-12-02T18:36:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.412909 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.412979 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.413001 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.413037 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.413064 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:49Z","lastTransitionTime":"2025-12-02T18:36:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.516259 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.516350 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.516375 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.516413 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.516440 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:49Z","lastTransitionTime":"2025-12-02T18:36:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.539032 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:36:49 crc kubenswrapper[4792]: E1202 18:36:49.539772 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.562601 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d1917fa-6107-4248-a5a8-a868f9db2814\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866\\\",\\\"image\\\":\\\"quay.io/crcont/ope
nshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:49Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.588139 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:49Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.609236 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63bfb707ccc4df1bf37c121b2c44c2e74bbd4c0ba5daec8a6b5e9b0d91c0f6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:49Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.619714 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.619956 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.620054 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.620165 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.620271 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:49Z","lastTransitionTime":"2025-12-02T18:36:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.631492 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc533e72d865a8b2ece2e1b3ac15c56f873c2370c482ec0d1b683b343733cb06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"k
ube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wpdh4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:49Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.670357 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39f390819a31ac0267b87bfa0f65ba5363f17fa7
4b6050fc556cd4536ff883d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39f390819a31ac0267b87bfa0f65ba5363f17fa74b6050fc556cd4536ff883d9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"message\\\":\\\"d from lister for network=default: \\\\u0026Service{ObjectMeta:{redhat-marketplace openshift-marketplace cf6d00ec-cc2c-43f6-815c-40ffd0563e71 5558 0 2025-02-23 05:23:25 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[olm.managed:true olm.service-spec-hash:aUeLNNcZzVZO2rcaZ5Kc8V3jffO0Ss4T6qX6V5] map[] [{operators.coreos.com/v1alpha1 CatalogSource redhat-marketplace fcb55c30-a739-4bc1-9c9c-7634e05a3dbd 0xc0076112ad 0xc0076112ae}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:grpc,Protocol:TCP,Port:50051,TargetPort:{0 50051 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{olm.catalogSource: redhat-marketplace,olm.managed: true,},ClusterIP:10.217.5.140,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.140],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nF1202 18:36:40.665405 6239 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-4jhb5_openshift-ovn-kubernetes(2f79c130-fb71-4e1c-9e2d-ef492a0acb04)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4jhb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:49Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.693330 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l7jxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da09a4ee-4e22-4396-a352-7bcb2b89db73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e9641526f236df1982b46cbfc0583af3bc551c9ddff5bdf2006a5d6c075307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nqc7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":
[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l7jxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:49Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.721200 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69235e1-c1ab-41e3-af2c-14b956c6c37b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddaff8d1d17ef199fd8e33d481a528de305f885915e7db4033dc0e64a94f669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\
\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2
eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18
:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l66ss\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:49Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.723439 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.723470 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.723482 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.723500 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.723524 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:49Z","lastTransitionTime":"2025-12-02T18:36:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.739904 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dw25w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6925e194-2dc8-4a3a-aa76-8db41ff27997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9e65c94ca8016f1dfcde2fc1262f2f78f150ada4796f7f2607221921bb3b805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"
name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tjt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dw25w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:49Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.752969 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qrlwg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"85a6200e-64b5-4e6b-bd19-933e3b576bfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3415f5b847f72aad5d88625823e8241334b103ab38a6450f9118b46058e96b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jt4k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qrlwg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:49Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.769177 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2ls4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"778806a7-7e6f-4776-8233-b42b296ebc52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcjpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcjpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2ls4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:49Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.824182 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9668059a-1772-400b-b2ad-86aa3d306dd9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2153e3d5a1b277988329aaab097b2fdb9955c4d79382b08ff93827ad69b629a1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T18:36:27Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 18:36:22.337074 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 18:36:22.338216 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179137523/tls.crt::/tmp/serving-cert-2179137523/tls.key\\\\\\\"\\\\nI1202 18:36:27.868924 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 18:36:27.874780 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 18:36:27.874832 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 18:36:27.874861 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 18:36:27.874872 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 18:36:27.886080 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1202 18:36:27.886174 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 18:36:27.886187 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886221 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 18:36:27.886250 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 18:36:27.886257 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 18:36:27.886264 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 18:36:27.889226 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:49Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.826982 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.827044 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.827063 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.827093 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.827114 4792 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:49Z","lastTransitionTime":"2025-12-02T18:36:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.847081 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:49Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.864103 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nvcwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2adea4f-f2e8-4311-8d3b-e720e68530eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f39043dbb211b12cfe20e988fa0b943cefec51e7d2927e94d1cbb9b56c999038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgfcx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93a7fa91b532301637883d482e698574ab677db991d76698cffefb28f4e8b9b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":
true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgfcx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nvcwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:49Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.878184 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:49Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.895981 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5777c8afe286becbd127ff2f1cc7617adff9e95a8e4061f433134bf749fa89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:49Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.910179 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8537808b344a3fb20a07a25558069d571f132b5a1004781b9efc0ed4d76de64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb24f25d6ddc35bcd113c137145f60606cbe656c7561eab446a5d8f3b9f5c8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:49Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.929613 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.929651 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.929682 4792 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.929700 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:49 crc kubenswrapper[4792]: I1202 18:36:49.929710 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:49Z","lastTransitionTime":"2025-12-02T18:36:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.032452 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.032507 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.032520 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.032552 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.032563 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:50Z","lastTransitionTime":"2025-12-02T18:36:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.135292 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.135353 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.135367 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.135392 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.135407 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:50Z","lastTransitionTime":"2025-12-02T18:36:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.238560 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.238641 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.238661 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.238690 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.238709 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:50Z","lastTransitionTime":"2025-12-02T18:36:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.341645 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.341747 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.341800 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.341838 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.341857 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:50Z","lastTransitionTime":"2025-12-02T18:36:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.445503 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.445648 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.445672 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.445700 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.445718 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:50Z","lastTransitionTime":"2025-12-02T18:36:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.538958 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.539021 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.539047 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:36:50 crc kubenswrapper[4792]: E1202 18:36:50.539203 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:36:50 crc kubenswrapper[4792]: E1202 18:36:50.539309 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:36:50 crc kubenswrapper[4792]: E1202 18:36:50.539591 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.549469 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.549556 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.549576 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.549605 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.549627 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:50Z","lastTransitionTime":"2025-12-02T18:36:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.653733 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.653803 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.653822 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.653851 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.653872 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:50Z","lastTransitionTime":"2025-12-02T18:36:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.757562 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.757640 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.757666 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.757705 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.757730 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:50Z","lastTransitionTime":"2025-12-02T18:36:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.860568 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.860634 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.860653 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.860679 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.860697 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:50Z","lastTransitionTime":"2025-12-02T18:36:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.963589 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.963659 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.963681 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.963741 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:50 crc kubenswrapper[4792]: I1202 18:36:50.963761 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:50Z","lastTransitionTime":"2025-12-02T18:36:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.067413 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.067500 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.067558 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.067592 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.067614 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:51Z","lastTransitionTime":"2025-12-02T18:36:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.169984 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.170055 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.170081 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.170116 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.170144 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:51Z","lastTransitionTime":"2025-12-02T18:36:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.273096 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.273164 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.273181 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.273211 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.273231 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:51Z","lastTransitionTime":"2025-12-02T18:36:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.376076 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.376150 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.376168 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.376201 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.376219 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:51Z","lastTransitionTime":"2025-12-02T18:36:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.441494 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.441623 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.441648 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.441678 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.441698 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:51Z","lastTransitionTime":"2025-12-02T18:36:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.462019 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/778806a7-7e6f-4776-8233-b42b296ebc52-metrics-certs\") pod \"network-metrics-daemon-2ls4m\" (UID: \"778806a7-7e6f-4776-8233-b42b296ebc52\") " pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:36:51 crc kubenswrapper[4792]: E1202 18:36:51.461902 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1ed8f756-400f-4462-b5a1-c3a97e79306e\\\",\\\"systemUUID\\\":\\\"4d48cf4e-c99d-43e6-acd7-ad269c0425b2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:51Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:51 crc kubenswrapper[4792]: E1202 18:36:51.462249 4792 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 18:36:51 crc kubenswrapper[4792]: E1202 18:36:51.462344 4792 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/secret/778806a7-7e6f-4776-8233-b42b296ebc52-metrics-certs podName:778806a7-7e6f-4776-8233-b42b296ebc52 nodeName:}" failed. No retries permitted until 2025-12-02 18:36:59.462317623 +0000 UTC m=+50.235210181 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/778806a7-7e6f-4776-8233-b42b296ebc52-metrics-certs") pod "network-metrics-daemon-2ls4m" (UID: "778806a7-7e6f-4776-8233-b42b296ebc52") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.468167 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.468252 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.468276 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.468323 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.468348 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:51Z","lastTransitionTime":"2025-12-02T18:36:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:51 crc kubenswrapper[4792]: E1202 18:36:51.491395 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1ed8f756-400f-4462-b5a1-c3a97e79306e\\\",\\\"systemUUID\\\":\\\"4d48cf4e-c99d-43e6-acd7-ad269c0425b2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:51Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.496793 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.496862 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.496880 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.496909 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.496933 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:51Z","lastTransitionTime":"2025-12-02T18:36:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:51 crc kubenswrapper[4792]: E1202 18:36:51.515865 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1ed8f756-400f-4462-b5a1-c3a97e79306e\\\",\\\"systemUUID\\\":\\\"4d48cf4e-c99d-43e6-acd7-ad269c0425b2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:51Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.519965 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.520027 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.520050 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.520079 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.520103 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:51Z","lastTransitionTime":"2025-12-02T18:36:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.539739 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:36:51 crc kubenswrapper[4792]: E1202 18:36:51.539893 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52" Dec 02 18:36:51 crc kubenswrapper[4792]: E1202 18:36:51.541672 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1ed8f756-400f-4462-b5a1-c3a97e79306e\\\",\\\"systemUUID\\\":\\\"4d48cf4e-c99d-43e6-acd7-ad269c0425b2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:51Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.546769 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.546817 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.546846 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.546868 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.546878 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:51Z","lastTransitionTime":"2025-12-02T18:36:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:51 crc kubenswrapper[4792]: E1202 18:36:51.575666 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:36:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1ed8f756-400f-4462-b5a1-c3a97e79306e\\\",\\\"systemUUID\\\":\\\"4d48cf4e-c99d-43e6-acd7-ad269c0425b2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:51Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:51 crc kubenswrapper[4792]: E1202 18:36:51.575817 4792 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.577721 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.577777 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.577797 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.577823 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.577842 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:51Z","lastTransitionTime":"2025-12-02T18:36:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.680758 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.680821 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.680839 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.680860 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.680878 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:51Z","lastTransitionTime":"2025-12-02T18:36:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.783817 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.783876 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.783888 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.783910 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.783924 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:51Z","lastTransitionTime":"2025-12-02T18:36:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.868868 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.870314 4792 scope.go:117] "RemoveContainer" containerID="39f390819a31ac0267b87bfa0f65ba5363f17fa74b6050fc556cd4536ff883d9" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.887139 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.887204 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.887231 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.887265 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.887289 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:51Z","lastTransitionTime":"2025-12-02T18:36:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.990986 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.991053 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.991071 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.991098 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:51 crc kubenswrapper[4792]: I1202 18:36:51.991118 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:51Z","lastTransitionTime":"2025-12-02T18:36:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.094948 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.095010 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.095019 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.095034 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.095044 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:52Z","lastTransitionTime":"2025-12-02T18:36:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.199024 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.199058 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.199067 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.199080 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.199087 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:52Z","lastTransitionTime":"2025-12-02T18:36:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.301998 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.302083 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.302106 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.302141 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.302165 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:52Z","lastTransitionTime":"2025-12-02T18:36:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.404976 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.405033 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.405044 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.405070 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.405086 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:52Z","lastTransitionTime":"2025-12-02T18:36:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.507848 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.507902 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.507916 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.507936 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.507949 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:52Z","lastTransitionTime":"2025-12-02T18:36:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.539283 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.539358 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:36:52 crc kubenswrapper[4792]: E1202 18:36:52.539435 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.539368 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:36:52 crc kubenswrapper[4792]: E1202 18:36:52.539568 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:36:52 crc kubenswrapper[4792]: E1202 18:36:52.539701 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.611280 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.611359 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.611398 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.611434 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.611464 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:52Z","lastTransitionTime":"2025-12-02T18:36:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.713906 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.714301 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.714321 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.714342 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.714356 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:52Z","lastTransitionTime":"2025-12-02T18:36:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.817516 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.817676 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.817695 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.817721 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.817739 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:52Z","lastTransitionTime":"2025-12-02T18:36:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.904610 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4jhb5_2f79c130-fb71-4e1c-9e2d-ef492a0acb04/ovnkube-controller/1.log" Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.909213 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" event={"ID":"2f79c130-fb71-4e1c-9e2d-ef492a0acb04","Type":"ContainerStarted","Data":"7774ef77b87138ed3e8ed79d14716c2def47a9b30828fd996641b33c3db6c81c"} Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.909976 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.921009 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.921064 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.921082 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.921105 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.921122 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:52Z","lastTransitionTime":"2025-12-02T18:36:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.928672 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l7jxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da09a4ee-4e22-4396-a352-7bcb2b89db73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e9641526f236df1982b46cbfc0583af3bc551c9ddff5bdf2006a5d6c075307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nqc7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l7jxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:52Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.952734 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69235e1-c1ab-41e3-af2c-14b956c6c37b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddaff8d1d17ef199fd8e33d481a528de305f885915e7db4033dc0e64a94f669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l66ss\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:52Z is after 2025-08-24T17:21:41Z"
Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.970019 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dw25w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6925e194-2dc8-4a3a-aa76-8db41ff27997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9e65c94ca8016f1dfcde2fc1262f2f78f150ada4796f7f2607221921bb3b805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tjt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dw25w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:52Z is after 2025-08-24T17:21:41Z"
Dec 02 18:36:52 crc kubenswrapper[4792]: I1202 18:36:52.987674 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qrlwg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85a6200e-64b5-4e6b-bd19-933e3b576bfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3415f5b847f72aad5d88625823e8241334b103ab38a6450f9118b46058e96b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jt4k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qrlwg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:52Z is after 2025-08-24T17:21:41Z"
Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.005382 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2ls4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"778806a7-7e6f-4776-8233-b42b296ebc52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcjpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcjpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2ls4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:53Z is after 2025-08-24T17:21:41Z"
Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.024513 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.024612 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.024629 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.024652 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.024670 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:53Z","lastTransitionTime":"2025-12-02T18:36:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.028495 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9668059a-1772-400b-b2ad-86aa3d306dd9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2153e3d5a1b277988329aaab097b2fdb9955c4d79382b08ff93827ad69b629a1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T18:36:27Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 18:36:22.337074 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 18:36:22.338216 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179137523/tls.crt::/tmp/serving-cert-2179137523/tls.key\\\\\\\"\\\\nI1202 18:36:27.868924 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 18:36:27.874780 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 18:36:27.874832 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 18:36:27.874861 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 18:36:27.874872 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 18:36:27.886080 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1202 18:36:27.886174 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 18:36:27.886187 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886221 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 18:36:27.886250 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 18:36:27.886257 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 18:36:27.886264 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 18:36:27.889226 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:53Z is after 2025-08-24T17:21:41Z"
Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.048886 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:53Z is after 2025-08-24T17:21:41Z"
Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.067443 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nvcwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2adea4f-f2e8-4311-8d3b-e720e68530eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f39043dbb211b12cfe20e988fa0b943cefec51e7d2927e94d1cbb9b56c999038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgfcx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93a7fa91b532301637883d482e698574ab677db991d76698cffefb28f4e8b9b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgfcx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nvcwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:53Z is after 2025-08-24T17:21:41Z"
Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.087956 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:53Z is after 2025-08-24T17:21:41Z"
Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.109039 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5777c8afe286becbd127ff2f1cc7617adff9e95a8e4061f433134bf749fa89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:53Z is after 2025-08-24T17:21:41Z"
Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.128226 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.128289 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.128308 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.128332 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.128350 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:53Z","lastTransitionTime":"2025-12-02T18:36:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.128478 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8537808b344a3fb20a07a25558069d571f132b5a1004781b9efc0ed4d76de64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb24f25d6ddc35bcd113c137145f60606cbe656c7561eab446a5d8f3b9f5c8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:53Z is after 2025-08-24T17:21:41Z"
Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.154371 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d1917fa-6107-4248-a5a8-a868f9db2814\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:53Z is after 2025-08-24T17:21:41Z"
Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.175439 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:53Z is after 2025-08-24T17:21:41Z"
Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.203054 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63bfb707ccc4df1bf37c121b2c44c2e74bbd4c0ba5daec8a6b5e9b0d91c0f6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:53Z is after 2025-08-24T17:21:41Z"
Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.219886 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc533e72d865a8b2ece2e1b3ac15c56f873c2370c482ec0d1b683b343733cb06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wpdh4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:53Z is after 2025-08-24T17:21:41Z"
Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.231269 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.231322 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.231339 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.231362 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.231378 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:53Z","lastTransitionTime":"2025-12-02T18:36:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.243354 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7774ef77b87138ed3e8ed79d14716c2def47a9b30828fd996641b33c3db6c81c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39f390819a31ac0267b87bfa0f65ba5363f17fa74b6050fc556cd4536ff883d9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"message\\\":\\\"d from lister for network=default: \\\\u0026Service{ObjectMeta:{redhat-marketplace openshift-marketplace cf6d00ec-cc2c-43f6-815c-40ffd0563e71 5558 0 2025-02-23 05:23:25 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[olm.managed:true olm.service-spec-hash:aUeLNNcZzVZO2rcaZ5Kc8V3jffO0Ss4T6qX6V5] map[] [{operators.coreos.com/v1alpha1 CatalogSource redhat-marketplace fcb55c30-a739-4bc1-9c9c-7634e05a3dbd 0xc0076112ad 0xc0076112ae}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:grpc,Protocol:TCP,Port:50051,TargetPort:{0 50051 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{olm.catalogSource: redhat-marketplace,olm.managed: true,},ClusterIP:10.217.5.140,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.140],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nF1202 18:36:40.665405 6239 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"conta
inerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4jhb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:53Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.334579 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.334646 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.334673 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.334701 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.334723 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:53Z","lastTransitionTime":"2025-12-02T18:36:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.437892 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.437964 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.437989 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.438020 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.438047 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:53Z","lastTransitionTime":"2025-12-02T18:36:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.539478 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:36:53 crc kubenswrapper[4792]: E1202 18:36:53.539757 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52" Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.542337 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.542379 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.542396 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.542418 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.542436 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:53Z","lastTransitionTime":"2025-12-02T18:36:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.646296 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.646387 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.646408 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.646444 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.646465 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:53Z","lastTransitionTime":"2025-12-02T18:36:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.750329 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.750398 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.750419 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.750447 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.750465 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:53Z","lastTransitionTime":"2025-12-02T18:36:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.853595 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.853660 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.853685 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.853713 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.853734 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:53Z","lastTransitionTime":"2025-12-02T18:36:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.916653 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4jhb5_2f79c130-fb71-4e1c-9e2d-ef492a0acb04/ovnkube-controller/2.log" Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.918113 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4jhb5_2f79c130-fb71-4e1c-9e2d-ef492a0acb04/ovnkube-controller/1.log" Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.924850 4792 generic.go:334] "Generic (PLEG): container finished" podID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerID="7774ef77b87138ed3e8ed79d14716c2def47a9b30828fd996641b33c3db6c81c" exitCode=1 Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.924914 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" event={"ID":"2f79c130-fb71-4e1c-9e2d-ef492a0acb04","Type":"ContainerDied","Data":"7774ef77b87138ed3e8ed79d14716c2def47a9b30828fd996641b33c3db6c81c"} Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.924976 4792 scope.go:117] "RemoveContainer" containerID="39f390819a31ac0267b87bfa0f65ba5363f17fa74b6050fc556cd4536ff883d9" Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.926373 4792 scope.go:117] "RemoveContainer" containerID="7774ef77b87138ed3e8ed79d14716c2def47a9b30828fd996641b33c3db6c81c" Dec 02 18:36:53 crc kubenswrapper[4792]: E1202 18:36:53.926821 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-4jhb5_openshift-ovn-kubernetes(2f79c130-fb71-4e1c-9e2d-ef492a0acb04)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.946099 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l7jxh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"da09a4ee-4e22-4396-a352-7bcb2b89db73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e9641526f236df1982b46cbfc0583af3bc551c9ddff5bdf2006a5d6c075307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nqc7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l7jxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:53Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.957856 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.957938 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.957960 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.957991 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.958020 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:53Z","lastTransitionTime":"2025-12-02T18:36:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.968772 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69235e1-c1ab-41e3-af2c-14b956c6c37b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddaff8d1d17ef199fd8e33d481a528de305f885915e7db4033dc0e64a94f669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11c01fc
e5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
ntrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l66ss\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:53Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:53 crc kubenswrapper[4792]: I1202 18:36:53.986090 4792 
status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dw25w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6925e194-2dc8-4a3a-aa76-8db41ff27997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9e65c94ca8016f1dfcde2fc1262f2f78f150ada4796f7f2607221921bb3b805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tjt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\
\"}}\" for pod \"openshift-multus\"/\"multus-dw25w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:53Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.002470 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qrlwg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85a6200e-64b5-4e6b-bd19-933e3b576bfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3415f5b847f72aad5d88625823e8241334b103ab38a6450f9118b46058e96b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jt4k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qrlwg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:53Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.020391 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2ls4m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"778806a7-7e6f-4776-8233-b42b296ebc52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcjpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcjpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2ls4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:54Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.042583 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9668059a-1772-400b-b2ad-86aa3d306dd9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2153e3d5a1b277988329aaab097b2fdb9955c4d79382b08ff93827ad69b629a1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T18:36:27Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 18:36:22.337074 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 18:36:22.338216 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179137523/tls.crt::/tmp/serving-cert-2179137523/tls.key\\\\\\\"\\\\nI1202 18:36:27.868924 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 18:36:27.874780 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 18:36:27.874832 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 18:36:27.874861 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 18:36:27.874872 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 18:36:27.886080 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1202 18:36:27.886174 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 18:36:27.886187 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886221 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 18:36:27.886250 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 18:36:27.886257 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 18:36:27.886264 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 18:36:27.889226 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:54Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.062013 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:54Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.062777 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.063111 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.063260 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.063411 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.063605 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:54Z","lastTransitionTime":"2025-12-02T18:36:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.083156 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nvcwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2adea4f-f2e8-4311-8d3b-e720e68530eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f39043dbb211b12cfe20e988fa0b943cefec51e7d2927e94d1cbb9b56c999038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgfcx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93a7fa91b532301637883d482e698574ab677db991d76698cffefb28f4e8b9b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgfcx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nvcwt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:54Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.104408 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:54Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.124819 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5777c8afe286becbd127ff2f1cc7617adff9e95a8e4061f433134bf749fa89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:54Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.146290 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8537808b344a3fb20a07a25558069d571f132b5a1004781b9efc0ed4d76de64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb24f25d6ddc35bcd113c137145f60606cbe656c7561eab446a5d8f3b9f5c8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:54Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.193463 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d1917fa-6107-4248-a5a8-a868f9db2814\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:54Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.195457 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.195927 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.195953 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.195984 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.195998 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:54Z","lastTransitionTime":"2025-12-02T18:36:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.216991 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:54Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.237860 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63bfb707ccc4df1bf37c121b2c44c2e74bbd4c0ba5daec8a6b5e9b0d91c0f6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:54Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.258998 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc533e72d865a8b2ece2e1b3ac15c56f873c2370c482ec0d1b683b343733cb06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wpdh4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:54Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.293251 4792 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7774ef77b87138ed3e8ed79d14716c2def47a9b30828fd996641b33c3db6c81c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39f390819a31ac0267b87bfa0f65ba5363f17fa74b6050fc556cd4536ff883d9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T18:36:41Z\\\",\\\"message\\\":\\\"d from lister for network=default: \\\\u0026Service{ObjectMeta:{redhat-marketplace openshift-marketplace cf6d00ec-cc2c-43f6-815c-40ffd0563e71 5558 0 2025-02-23 05:23:25 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[olm.managed:true olm.service-spec-hash:aUeLNNcZzVZO2rcaZ5Kc8V3jffO0Ss4T6qX6V5] map[] [{operators.coreos.com/v1alpha1 CatalogSource redhat-marketplace fcb55c30-a739-4bc1-9c9c-7634e05a3dbd 0xc0076112ad 0xc0076112ae}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:grpc,Protocol:TCP,Port:50051,TargetPort:{0 50051 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{olm.catalogSource: redhat-marketplace,olm.managed: true,},ClusterIP:10.217.5.140,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.140],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nF1202 18:36:40.665405 6239 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7774ef77b87138ed3e8ed79d14716c2def47a9b30828fd996641b33c3db6c81c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T18:36:53Z\\\",\\\"message\\\":\\\" ovnkube.go:137] failed to run ovnkube: [failed to 
start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:52Z is after 2025-08-24T17:21:41Z]\\\\nI1202 18:36:52.795313 6429 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:default/kubernetes]} name:Service_default/kubernetes_TCP_node_router_crc options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.1:443:169.254.0.2:6443]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {4de\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d\\\",\\\
"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4jhb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:54Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.299047 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.299121 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.299149 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.299215 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.299244 4792 setters.go:603] "Node 
became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:54Z","lastTransitionTime":"2025-12-02T18:36:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.403815 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.403888 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.403910 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.403943 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.403965 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:54Z","lastTransitionTime":"2025-12-02T18:36:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.508041 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.508104 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.508120 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.508145 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.508165 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:54Z","lastTransitionTime":"2025-12-02T18:36:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.539802 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.539883 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.539807 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:36:54 crc kubenswrapper[4792]: E1202 18:36:54.539988 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:36:54 crc kubenswrapper[4792]: E1202 18:36:54.540090 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:36:54 crc kubenswrapper[4792]: E1202 18:36:54.540265 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.621561 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.621652 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.621674 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.621710 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.621737 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:54Z","lastTransitionTime":"2025-12-02T18:36:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.725391 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.725784 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.726108 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.726271 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.726404 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:54Z","lastTransitionTime":"2025-12-02T18:36:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.829221 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.829288 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.829308 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.829333 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.829352 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:54Z","lastTransitionTime":"2025-12-02T18:36:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.932069 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.932130 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.932151 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.932178 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.932197 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:54Z","lastTransitionTime":"2025-12-02T18:36:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.932516 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4jhb5_2f79c130-fb71-4e1c-9e2d-ef492a0acb04/ovnkube-controller/2.log" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.940898 4792 scope.go:117] "RemoveContainer" containerID="7774ef77b87138ed3e8ed79d14716c2def47a9b30828fd996641b33c3db6c81c" Dec 02 18:36:54 crc kubenswrapper[4792]: E1202 18:36:54.941247 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-4jhb5_openshift-ovn-kubernetes(2f79c130-fb71-4e1c-9e2d-ef492a0acb04)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.958363 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2ls4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"778806a7-7e6f-4776-8233-b42b296ebc52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcjpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcjpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2ls4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:54Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.974017 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l7jxh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"da09a4ee-4e22-4396-a352-7bcb2b89db73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e9641526f236df1982b46cbfc0583af3bc551c9ddff5bdf2006a5d6c075307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nqc7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l7jxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:54Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:54 crc kubenswrapper[4792]: I1202 18:36:54.997603 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69235e1-c1ab-41e3-af2c-14b956c6c37b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddaff8d1d17ef199fd8e33d481a528de305f885915e7db4033dc0e64a94f669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l66ss\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:54Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.018469 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dw25w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6925e194-2dc8-4a3a-aa76-8db41ff27997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9e65c94ca8016f1dfcde2fc1262f2f78f150ada4796f7f2607221921bb3b805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tjt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dw25w\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:55Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.033030 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qrlwg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85a6200e-64b5-4e6b-bd19-933e3b576bfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3415f5b847f72aad5d88625823e8241334b103ab38a6450f9118b46058e96b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jt4k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qrlwg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:55Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.034904 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.034980 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.034997 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:55 crc 
kubenswrapper[4792]: I1202 18:36:55.035022 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.035040 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:55Z","lastTransitionTime":"2025-12-02T18:36:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.053921 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9668059a-1772-400b-b2ad-86aa3d306dd9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiser
ver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2153e3d5a1b277988329aaab097b2fdb9955c4d79382b08ff93827ad69b629a1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T18:36:27Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 18:36:22.337074 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 18:36:22.338216 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179137523/tls.crt::/tmp/serving-cert-2179137523/tls.key\\\\\\\"\\\\nI1202 18:36:27.868924 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 18:36:27.874780 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 18:36:27.874832 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 18:36:27.874861 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 18:36:27.874872 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 18:36:27.886080 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1202 18:36:27.886174 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 18:36:27.886187 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886221 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 18:36:27.886250 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 18:36:27.886257 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 18:36:27.886264 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 18:36:27.889226 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:55Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.073325 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:55Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.091090 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nvcwt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2adea4f-f2e8-4311-8d3b-e720e68530eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f39043dbb211b12cfe20e988fa0b943cefec51e7d2927e94d1cbb9b56c999038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgfcx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93a7fa91b532301637883d482e698574ab677db991d76698cffefb28f4e8b9b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgfcx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nvcwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:55Z is after 2025-08-24T17:21:41Z" Dec 02 
18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.107628 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:55Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.131908 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5777c8afe286becbd127ff2f1cc7617adff9e95a8e4061f433134bf749fa89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:55Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.137632 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.137700 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.137723 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.137752 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.137774 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:55Z","lastTransitionTime":"2025-12-02T18:36:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.155584 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8537808b344a3fb20a07a25558069d571f132b5a1004781b9efc0ed4d76de64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb24f25d6ddc35bcd113c137145f60606cbe656c7561eab446a5d8f3b9f5c8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:55Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.187450 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7774ef77b87138ed3e8ed79d14716c2def47a9b30828fd996641b33c3db6c81c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7774ef77b87138ed3e8ed79d14716c2def47a9b30828fd996641b33c3db6c81c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T18:36:53Z\\\",\\\"message\\\":\\\" ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:52Z is after 2025-08-24T17:21:41Z]\\\\nI1202 18:36:52.795313 6429 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:default/kubernetes]} name:Service_default/kubernetes_TCP_node_router_crc options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.1:443:169.254.0.2:6443]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {4de\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-4jhb5_openshift-ovn-kubernetes(2f79c130-fb71-4e1c-9e2d-ef492a0acb04)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4jhb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:55Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.207756 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d1917fa-6107-4248-a5a8-a868f9db2814\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:55Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.225318 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:55Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.241589 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.241682 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.241701 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.241772 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.241791 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:55Z","lastTransitionTime":"2025-12-02T18:36:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.242510 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63bfb707ccc4df1bf37c121b2c44c2e74bbd4c0ba5daec8a6b5e9b0d91c0f6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:55Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.262588 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc533e72d865a8b2ece2e1b3ac15c56f873c2370c482ec0d1b683b343733cb06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wpdh4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:55Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.344275 4792 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.344349 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.344366 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.344392 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.344409 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:55Z","lastTransitionTime":"2025-12-02T18:36:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.447857 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.447901 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.447912 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.447928 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.447939 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:55Z","lastTransitionTime":"2025-12-02T18:36:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.539075 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:36:55 crc kubenswrapper[4792]: E1202 18:36:55.539317 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.550862 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.550915 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.550934 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.550973 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.551012 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:55Z","lastTransitionTime":"2025-12-02T18:36:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.654474 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.654579 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.654606 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.654632 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.654655 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:55Z","lastTransitionTime":"2025-12-02T18:36:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.758783 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.758865 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.758884 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.758917 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.758945 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:55Z","lastTransitionTime":"2025-12-02T18:36:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.861676 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.861766 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.861796 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.861826 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.861856 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:55Z","lastTransitionTime":"2025-12-02T18:36:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.965057 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.965119 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.965136 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.965160 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:55 crc kubenswrapper[4792]: I1202 18:36:55.965179 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:55Z","lastTransitionTime":"2025-12-02T18:36:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.068812 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.068870 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.068888 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.068913 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.068932 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:56Z","lastTransitionTime":"2025-12-02T18:36:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.172575 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.172646 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.172668 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.172698 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.172732 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:56Z","lastTransitionTime":"2025-12-02T18:36:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.281261 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.281395 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.281450 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.281578 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.281609 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:56Z","lastTransitionTime":"2025-12-02T18:36:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.385282 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.385357 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.385374 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.385402 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.385422 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:56Z","lastTransitionTime":"2025-12-02T18:36:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.489771 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.489828 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.489843 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.489864 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.489908 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:56Z","lastTransitionTime":"2025-12-02T18:36:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.538853 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.538861 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.538917 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:36:56 crc kubenswrapper[4792]: E1202 18:36:56.539072 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:36:56 crc kubenswrapper[4792]: E1202 18:36:56.539145 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:36:56 crc kubenswrapper[4792]: E1202 18:36:56.539461 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.592301 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.592384 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.592404 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.592433 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.592454 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:56Z","lastTransitionTime":"2025-12-02T18:36:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.695156 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.695230 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.695248 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.695280 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.695302 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:56Z","lastTransitionTime":"2025-12-02T18:36:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.800036 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.800115 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.800134 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.800163 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.800183 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:56Z","lastTransitionTime":"2025-12-02T18:36:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.905092 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.905201 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.905222 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.905286 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:56 crc kubenswrapper[4792]: I1202 18:36:56.905306 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:56Z","lastTransitionTime":"2025-12-02T18:36:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.009109 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.009161 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.009204 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.009222 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.009237 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:57Z","lastTransitionTime":"2025-12-02T18:36:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.112658 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.112770 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.112795 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.112822 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.112843 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:57Z","lastTransitionTime":"2025-12-02T18:36:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.216113 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.216206 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.216226 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.216261 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.216283 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:57Z","lastTransitionTime":"2025-12-02T18:36:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.320736 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.320840 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.320869 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.320906 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.320935 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:57Z","lastTransitionTime":"2025-12-02T18:36:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.424483 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.424578 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.424595 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.424620 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.424641 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:57Z","lastTransitionTime":"2025-12-02T18:36:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.528087 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.528136 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.528149 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.528168 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.528181 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:57Z","lastTransitionTime":"2025-12-02T18:36:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.539265 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:36:57 crc kubenswrapper[4792]: E1202 18:36:57.539480 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52" Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.630292 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.630366 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.630388 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.630421 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.630451 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:57Z","lastTransitionTime":"2025-12-02T18:36:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.732829 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.732914 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.732939 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.732972 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.732998 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:57Z","lastTransitionTime":"2025-12-02T18:36:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.836884 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.836959 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.836978 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.837002 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.837022 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:57Z","lastTransitionTime":"2025-12-02T18:36:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.940591 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.940618 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.940627 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.940640 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:57 crc kubenswrapper[4792]: I1202 18:36:57.940650 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:57Z","lastTransitionTime":"2025-12-02T18:36:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.044253 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.044320 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.044337 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.044367 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.044387 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:58Z","lastTransitionTime":"2025-12-02T18:36:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.148177 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.148246 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.148263 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.148297 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.148314 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:58Z","lastTransitionTime":"2025-12-02T18:36:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.251695 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.251773 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.251790 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.251824 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.251854 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:58Z","lastTransitionTime":"2025-12-02T18:36:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.355130 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.355190 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.355209 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.355231 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.355249 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:58Z","lastTransitionTime":"2025-12-02T18:36:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.458645 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.458706 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.458723 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.458746 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.458763 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:58Z","lastTransitionTime":"2025-12-02T18:36:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.539256 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.539295 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:36:58 crc kubenswrapper[4792]: E1202 18:36:58.539557 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.539295 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:36:58 crc kubenswrapper[4792]: E1202 18:36:58.539728 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:36:58 crc kubenswrapper[4792]: E1202 18:36:58.539874 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.561823 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.561885 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.561902 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.561930 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.562011 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:58Z","lastTransitionTime":"2025-12-02T18:36:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.665017 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.665086 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.665104 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.665133 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.665153 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:58Z","lastTransitionTime":"2025-12-02T18:36:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.769081 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.769153 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.769171 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.769197 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.769214 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:58Z","lastTransitionTime":"2025-12-02T18:36:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.872165 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.872243 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.872261 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.872311 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.872329 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:58Z","lastTransitionTime":"2025-12-02T18:36:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.975243 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.975314 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.975332 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.975357 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:58 crc kubenswrapper[4792]: I1202 18:36:58.975375 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:58Z","lastTransitionTime":"2025-12-02T18:36:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.079034 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.079160 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.079182 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.079223 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.079253 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:59Z","lastTransitionTime":"2025-12-02T18:36:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.181987 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.182283 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.182388 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.182518 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.182655 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:59Z","lastTransitionTime":"2025-12-02T18:36:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.286789 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.287341 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.287500 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.287678 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.287824 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:59Z","lastTransitionTime":"2025-12-02T18:36:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.391836 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.392725 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.392941 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.393233 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.393416 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:59Z","lastTransitionTime":"2025-12-02T18:36:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.497391 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.497454 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.497469 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.497491 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.497509 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:59Z","lastTransitionTime":"2025-12-02T18:36:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.539313 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:36:59 crc kubenswrapper[4792]: E1202 18:36:59.539498 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.561792 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:59Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.562378 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/778806a7-7e6f-4776-8233-b42b296ebc52-metrics-certs\") pod \"network-metrics-daemon-2ls4m\" (UID: \"778806a7-7e6f-4776-8233-b42b296ebc52\") " pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:36:59 crc kubenswrapper[4792]: E1202 18:36:59.562611 4792 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 18:36:59 crc kubenswrapper[4792]: E1202 18:36:59.562690 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/778806a7-7e6f-4776-8233-b42b296ebc52-metrics-certs podName:778806a7-7e6f-4776-8233-b42b296ebc52 nodeName:}" failed. No retries permitted until 2025-12-02 18:37:15.562669227 +0000 UTC m=+66.335561565 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/778806a7-7e6f-4776-8233-b42b296ebc52-metrics-certs") pod "network-metrics-daemon-2ls4m" (UID: "778806a7-7e6f-4776-8233-b42b296ebc52") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.582479 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5777c8afe286becbd127ff2f1cc7617adff9e95a8e4061f433134bf749fa89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:59Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.601161 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.601191 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.601200 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.601215 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.601226 4792 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:59Z","lastTransitionTime":"2025-12-02T18:36:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.602250 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8537808b344a3fb20a07a25558069d571f132b5a1004781b9efc0ed4d76de64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb24f25d6ddc35bcd113c137145f60606cbe656c7561eab446a5d8f3b9f5c8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:59Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.619370 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d1917fa-6107-4248-a5a8-a868f9db2814\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\
\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:59Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.640215 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:59Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.656504 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63bfb707ccc4df1bf37c121b2c44c2e74bbd4c0ba5daec8a6b5e9b0d91c0f6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:59Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.672819 4792 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc533e72d865a8b2ece2e1b3ac15c56f873c2370c482ec0d1b683b343733cb06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wpdh4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:59Z is after 2025-08-24T17:21:41Z" Dec 02 
18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.699343 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04020010567f8dc6a1924f2aa5b0a2
61e83810e0639500aea5a570826c99efdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha
256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7774ef77b87138ed3e8ed79d14716c2def47a9b30828fd996641b33c3db6c81c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7774ef77b87138ed3e8ed79d14716c2def47a9b30828fd996641b33c3db6c81c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T18:36:53Z\\\",\\\"message\\\":\\\" ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:52Z is after 2025-08-24T17:21:41Z]\\\\nI1202 18:36:52.795313 6429 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:default/kubernetes]} name:Service_default/kubernetes_TCP_node_router_crc options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.1:443:169.254.0.2:6443]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {4de\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-4jhb5_openshift-ovn-kubernetes(2f79c130-fb71-4e1c-9e2d-ef492a0acb04)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4jhb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:59Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.704418 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.704483 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.704503 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.704592 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.704625 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:59Z","lastTransitionTime":"2025-12-02T18:36:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.711315 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l7jxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da09a4ee-4e22-4396-a352-7bcb2b89db73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e9641526f236df1982b46cbfc0583af3bc551c9ddff5bdf2006a5d6c075307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nqc7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l7jxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:59Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.734222 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69235e1-c1ab-41e3-af2c-14b956c6c37b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddaff8d1d17ef199fd8e33d481a528de305f885915e7db4033dc0e64a94f669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l66ss\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:59Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.750332 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dw25w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6925e194-2dc8-4a3a-aa76-8db41ff27997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9e65c94ca8016f1dfcde2fc1262f2f78f150ada4796f7f2607221921bb3b805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tjt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dw25w\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:59Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.764004 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:36:59 crc kubenswrapper[4792]: E1202 18:36:59.764206 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:37:31.764171467 +0000 UTC m=+82.537063805 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.766145 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qrlwg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85a6200e-64b5-4e6b-bd19-933e3b576bfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3415f5b847f72aad5d88625823e8241334b103ab38a6450f9118b46058e96b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jt4k\\\",\\\"readOnly\\\":tr
ue,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qrlwg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:59Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.777497 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2ls4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"778806a7-7e6f-4776-8233-b42b296ebc52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcjpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcjpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2ls4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:59Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.792595 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9668059a-1772-400b-b2ad-86aa3d306dd9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2153e3d5a1b277988329aaab097b2fdb9955c4d79382b08ff93827ad69b629a1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T18:36:27Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 18:36:22.337074 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 18:36:22.338216 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179137523/tls.crt::/tmp/serving-cert-2179137523/tls.key\\\\\\\"\\\\nI1202 18:36:27.868924 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 18:36:27.874780 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 18:36:27.874832 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 18:36:27.874861 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 18:36:27.874872 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 18:36:27.886080 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1202 18:36:27.886174 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 18:36:27.886187 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886221 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 18:36:27.886250 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 18:36:27.886257 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 18:36:27.886264 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 18:36:27.889226 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:59Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.806965 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.807008 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.807020 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.807039 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.807052 4792 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:59Z","lastTransitionTime":"2025-12-02T18:36:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.809981 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:59Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.830740 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nvcwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2adea4f-f2e8-4311-8d3b-e720e68530eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f39043dbb211b12cfe20e988fa0b943cefec51e7d2927e94d1cbb9b56c999038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgfcx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93a7fa91b532301637883d482e698574ab677db991d76698cffefb28f4e8b9b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":
true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgfcx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nvcwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:59Z is after 2025-08-24T17:21:41Z" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.865739 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.865781 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.865805 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.865826 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:36:59 crc kubenswrapper[4792]: E1202 18:36:59.865944 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 18:36:59 crc kubenswrapper[4792]: E1202 18:36:59.865959 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 18:36:59 crc kubenswrapper[4792]: E1202 18:36:59.865970 4792 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod 
openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 18:36:59 crc kubenswrapper[4792]: E1202 18:36:59.866014 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-02 18:37:31.866001215 +0000 UTC m=+82.638893543 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 18:36:59 crc kubenswrapper[4792]: E1202 18:36:59.866234 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 18:36:59 crc kubenswrapper[4792]: E1202 18:36:59.866255 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 18:36:59 crc kubenswrapper[4792]: E1202 18:36:59.866263 4792 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 18:36:59 crc kubenswrapper[4792]: E1202 18:36:59.866296 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-02 18:37:31.866286033 +0000 UTC m=+82.639178361 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 18:36:59 crc kubenswrapper[4792]: E1202 18:36:59.866363 4792 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 18:36:59 crc kubenswrapper[4792]: E1202 18:36:59.866244 4792 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 18:36:59 crc kubenswrapper[4792]: E1202 18:36:59.866451 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 18:37:31.866434307 +0000 UTC m=+82.639326625 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 18:36:59 crc kubenswrapper[4792]: E1202 18:36:59.866713 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 18:37:31.866693024 +0000 UTC m=+82.639585432 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.909938 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.909980 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.909991 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.910006 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:36:59 crc kubenswrapper[4792]: I1202 18:36:59.910016 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:36:59Z","lastTransitionTime":"2025-12-02T18:36:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.012863 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.012927 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.012945 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.012965 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.012977 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:00Z","lastTransitionTime":"2025-12-02T18:37:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.115405 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.115478 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.115493 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.115516 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.115554 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:00Z","lastTransitionTime":"2025-12-02T18:37:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.217998 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.218250 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.218333 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.218426 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.218513 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:00Z","lastTransitionTime":"2025-12-02T18:37:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.321888 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.322130 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.322254 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.322350 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.322437 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:00Z","lastTransitionTime":"2025-12-02T18:37:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.425331 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.425381 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.425393 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.425410 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.425429 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:00Z","lastTransitionTime":"2025-12-02T18:37:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.527511 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.527593 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.527607 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.527632 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.527646 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:00Z","lastTransitionTime":"2025-12-02T18:37:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.539315 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.539343 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.539337 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:37:00 crc kubenswrapper[4792]: E1202 18:37:00.539645 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:37:00 crc kubenswrapper[4792]: E1202 18:37:00.539490 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:37:00 crc kubenswrapper[4792]: E1202 18:37:00.539964 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.630214 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.630560 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.630757 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.630923 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.631067 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:00Z","lastTransitionTime":"2025-12-02T18:37:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.734562 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.734598 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.734606 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.734620 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.734628 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:00Z","lastTransitionTime":"2025-12-02T18:37:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.837287 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.837788 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.838030 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.838294 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.838504 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:00Z","lastTransitionTime":"2025-12-02T18:37:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.941816 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.941852 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.941860 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.941873 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:00 crc kubenswrapper[4792]: I1202 18:37:00.941882 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:00Z","lastTransitionTime":"2025-12-02T18:37:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.043775 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.043829 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.043846 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.043867 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.043884 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:01Z","lastTransitionTime":"2025-12-02T18:37:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.146594 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.146686 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.146708 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.146735 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.146755 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:01Z","lastTransitionTime":"2025-12-02T18:37:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.249509 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.249579 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.249589 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.249604 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.249613 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:01Z","lastTransitionTime":"2025-12-02T18:37:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.352215 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.352779 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.352798 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.352827 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.352846 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:01Z","lastTransitionTime":"2025-12-02T18:37:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.455463 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.455498 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.455506 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.455538 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.455548 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:01Z","lastTransitionTime":"2025-12-02T18:37:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.539482 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:37:01 crc kubenswrapper[4792]: E1202 18:37:01.539749 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.558985 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.559315 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.559498 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.559705 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.559839 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:01Z","lastTransitionTime":"2025-12-02T18:37:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.595500 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.595609 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.595629 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.595682 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.595700 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:01Z","lastTransitionTime":"2025-12-02T18:37:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:01 crc kubenswrapper[4792]: E1202 18:37:01.618637 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1ed8f756-400f-4462-b5a1-c3a97e79306e\\\",\\\"systemUUID\\\":\\\"4d48cf4e-c99d-43e6-acd7-ad269c0425b2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:01Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.624044 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.624143 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.624164 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.624186 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.624205 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:01Z","lastTransitionTime":"2025-12-02T18:37:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.631260 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.648598 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 02 18:37:01 crc kubenswrapper[4792]: E1202 18:37:01.649322 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1ed8f756-400f-4462-b5a1-c3a97e79306e\\\",\\\"systemUUID\\\":\\\"4d48cf4e-c99d-43e6-acd7-ad269c0425b2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:01Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.656014 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.656084 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.656107 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.656136 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.656159 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:01Z","lastTransitionTime":"2025-12-02T18:37:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.658511 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5777c8afe286becbd127ff2f1cc7617adff9e95a8e4061f433134bf749fa89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:01Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:01 crc kubenswrapper[4792]: E1202 18:37:01.671316 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1ed8f756-400f-4462-b5a1-c3a97e79306e\\\",\\\"systemUUID\\\":\\\"4d48cf4e-c99d-43e6-acd7-ad269c0425b2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:01Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.678243 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.678295 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.678311 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.678333 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.678350 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:01Z","lastTransitionTime":"2025-12-02T18:37:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.684490 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8537808b344a3fb20a07a25558069d571f132b5a1004781b9efc0ed4d76de64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb24f25d6ddc35bcd113c137145f60606cbe656c7561eab446a5d8f3b9f5c8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:01Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:01 crc kubenswrapper[4792]: E1202 18:37:01.699361 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1ed8f756-400f-4462-b5a1-c3a97e79306e\\\",\\\"systemUUID\\\":\\\"4d48cf4e-c99d-43e6-acd7-ad269c0425b2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:01Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.704582 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.704699 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.704718 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.704773 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.704792 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:01Z","lastTransitionTime":"2025-12-02T18:37:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.705036 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:01Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:01 crc kubenswrapper[4792]: E1202 18:37:01.721232 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1ed8f756-400f-4462-b5a1-c3a97e79306e\\\",\\\"systemUUID\\\":\\\"4
d48cf4e-c99d-43e6-acd7-ad269c0425b2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:01Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:01 crc kubenswrapper[4792]: E1202 18:37:01.722232 4792 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.722504 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:01Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.724284 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.724341 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.724361 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.724388 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.724442 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:01Z","lastTransitionTime":"2025-12-02T18:37:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.735837 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63bfb707ccc4df1bf37c121b2c44c2e74bbd4c0ba5daec8a6b5e9b0d91c0f6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:01Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.751062 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc533e72d865a8b2ece2e1b3ac15c56f873c2370c482ec0d1b683b343733cb06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wpdh4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:01Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.782309 4792 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7774ef77b87138ed3e8ed79d14716c2def47a9b30828fd996641b33c3db6c81c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7774ef77b87138ed3e8ed79d14716c2def47a9b30828fd996641b33c3db6c81c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T18:36:53Z\\\",\\\"message\\\":\\\" ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:52Z is after 2025-08-24T17:21:41Z]\\\\nI1202 18:36:52.795313 6429 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:default/kubernetes]} name:Service_default/kubernetes_TCP_node_router_crc options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.1:443:169.254.0.2:6443]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {4de\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-4jhb5_openshift-ovn-kubernetes(2f79c130-fb71-4e1c-9e2d-ef492a0acb04)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4jhb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:01Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.799262 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d1917fa-6107-4248-a5a8-a868f9db2814\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:01Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.817468 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69235e1-c1ab-41e3-af2c-14b956c6c37b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddaff8d1d17ef199fd8e33d481a528de305f885915e7db4033dc0e64a94f669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l66ss\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:01Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.827116 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.827189 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:01 crc 
kubenswrapper[4792]: I1202 18:37:01.827209 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.827235 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.827252 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:01Z","lastTransitionTime":"2025-12-02T18:37:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.835392 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dw25w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6925e194-2dc8-4a3a-aa76-8db41ff27997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9e65c94ca8016f1dfcde2fc1262f2f78f150ada4796f7f2607221921bb3b805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"n
ame\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tjt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dw25w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:01Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.849870 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qrlwg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85a6200e-64b5-4e6b-bd19-933e3b576bfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3415f5b847f72aad5d88625823e8241334b103ab38a6450f9118b46058e96b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jt4k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.12
6.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qrlwg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:01Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.862447 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2ls4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"778806a7-7e6f-4776-8233-b42b296ebc52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcjpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcjpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2ls4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:01Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.875490 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l7jxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da09a4ee-4e22-4396-a352-7bcb2b89db73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e9641526f236df1982b46cbfc0583af3bc551c9ddff5bdf2006a5d6c075307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nqc7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l7jxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:01Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.888997 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nvcwt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2adea4f-f2e8-4311-8d3b-e720e68530eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f39043dbb211b12cfe20e988fa0b943cefec51e7d2927e94d1cbb9b56c999038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgfcx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93a7fa91b532301637883d482e698574ab677db991d76698cffefb28f4e8b9b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgfcx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nvcwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:01Z is after 2025-08-24T17:21:41Z" Dec 02 
18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.910036 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9668059a-1772-400b-b2ad-86aa3d306dd9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2153e3d5a1b277988329aaab097b2fdb9955c4d79382b08ff93827ad69b629a1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T18:36:27Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 18:36:22.337074 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 18:36:22.338216 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179137523/tls.crt::/tmp/serving-cert-2179137523/tls.key\\\\\\\"\\\\nI1202 18:36:27.868924 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 18:36:27.874780 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 18:36:27.874832 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 18:36:27.874861 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 18:36:27.874872 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 18:36:27.886080 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1202 18:36:27.886174 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 18:36:27.886187 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886221 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 18:36:27.886250 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 18:36:27.886257 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 18:36:27.886264 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 18:36:27.889226 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:01Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.929725 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:01Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.930623 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.930697 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.930721 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.930752 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:01 crc kubenswrapper[4792]: I1202 18:37:01.930775 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:01Z","lastTransitionTime":"2025-12-02T18:37:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.033233 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.033291 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.033333 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.033362 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.033381 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:02Z","lastTransitionTime":"2025-12-02T18:37:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.136329 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.136391 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.136409 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.136432 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.136452 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:02Z","lastTransitionTime":"2025-12-02T18:37:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.239807 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.239867 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.239890 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.239922 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.239940 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:02Z","lastTransitionTime":"2025-12-02T18:37:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.343500 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.343603 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.343620 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.343645 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.343666 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:02Z","lastTransitionTime":"2025-12-02T18:37:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.446898 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.446956 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.446975 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.446998 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.447015 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:02Z","lastTransitionTime":"2025-12-02T18:37:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.539117 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.539211 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.539125 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:37:02 crc kubenswrapper[4792]: E1202 18:37:02.539304 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:37:02 crc kubenswrapper[4792]: E1202 18:37:02.539454 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:37:02 crc kubenswrapper[4792]: E1202 18:37:02.539643 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.549300 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.549391 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.549410 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.549433 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.549450 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:02Z","lastTransitionTime":"2025-12-02T18:37:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.652242 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.652377 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.652399 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.652424 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.652444 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:02Z","lastTransitionTime":"2025-12-02T18:37:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.755653 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.755714 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.755733 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.755757 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.755774 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:02Z","lastTransitionTime":"2025-12-02T18:37:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.862427 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.862461 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.862469 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.862480 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.862489 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:02Z","lastTransitionTime":"2025-12-02T18:37:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.965047 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.965090 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.965102 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.965118 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:02 crc kubenswrapper[4792]: I1202 18:37:02.965129 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:02Z","lastTransitionTime":"2025-12-02T18:37:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:03 crc kubenswrapper[4792]: I1202 18:37:03.067510 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:03 crc kubenswrapper[4792]: I1202 18:37:03.067562 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:03 crc kubenswrapper[4792]: I1202 18:37:03.067569 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:03 crc kubenswrapper[4792]: I1202 18:37:03.067580 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:03 crc kubenswrapper[4792]: I1202 18:37:03.067588 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:03Z","lastTransitionTime":"2025-12-02T18:37:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:03 crc kubenswrapper[4792]: I1202 18:37:03.171204 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:03 crc kubenswrapper[4792]: I1202 18:37:03.171241 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:03 crc kubenswrapper[4792]: I1202 18:37:03.171250 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:03 crc kubenswrapper[4792]: I1202 18:37:03.171263 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:03 crc kubenswrapper[4792]: I1202 18:37:03.171271 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:03Z","lastTransitionTime":"2025-12-02T18:37:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:03 crc kubenswrapper[4792]: I1202 18:37:03.273940 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:03 crc kubenswrapper[4792]: I1202 18:37:03.273967 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:03 crc kubenswrapper[4792]: I1202 18:37:03.273976 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:03 crc kubenswrapper[4792]: I1202 18:37:03.273989 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:03 crc kubenswrapper[4792]: I1202 18:37:03.274001 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:03Z","lastTransitionTime":"2025-12-02T18:37:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 02 18:37:03 crc kubenswrapper[4792]: I1202 18:37:03.539705 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m"
Dec 02 18:37:03 crc kubenswrapper[4792]: E1202 18:37:03.539898 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52"
Dec 02 18:37:04 crc kubenswrapper[4792]: I1202 18:37:04.095602 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 18:37:04 crc kubenswrapper[4792]: I1202 18:37:04.095652 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 18:37:04 crc kubenswrapper[4792]: I1202 18:37:04.095668 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 18:37:04 crc kubenswrapper[4792]: I1202 18:37:04.095689 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 18:37:04 crc kubenswrapper[4792]: I1202 18:37:04.095705 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:04Z","lastTransitionTime":"2025-12-02T18:37:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 18:37:04 crc kubenswrapper[4792]: I1202 18:37:04.538742 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 18:37:04 crc kubenswrapper[4792]: I1202 18:37:04.538826 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 18:37:04 crc kubenswrapper[4792]: I1202 18:37:04.538907 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 18:37:04 crc kubenswrapper[4792]: E1202 18:37:04.538848 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 18:37:04 crc kubenswrapper[4792]: E1202 18:37:04.538955 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 18:37:04 crc kubenswrapper[4792]: E1202 18:37:04.539056 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 18:37:05 crc kubenswrapper[4792]: I1202 18:37:05.023144 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 18:37:05 crc kubenswrapper[4792]: I1202 18:37:05.023205 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 18:37:05 crc kubenswrapper[4792]: I1202 18:37:05.023222 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 18:37:05 crc kubenswrapper[4792]: I1202 18:37:05.023246 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 18:37:05 crc kubenswrapper[4792]: I1202 18:37:05.023263 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:05Z","lastTransitionTime":"2025-12-02T18:37:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 18:37:05 crc kubenswrapper[4792]: I1202 18:37:05.539488 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m"
Dec 02 18:37:05 crc kubenswrapper[4792]: E1202 18:37:05.539755 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52"
Dec 02 18:37:06 crc kubenswrapper[4792]: I1202 18:37:06.055535 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 18:37:06 crc kubenswrapper[4792]: I1202 18:37:06.055578 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 18:37:06 crc kubenswrapper[4792]: I1202 18:37:06.055587 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 18:37:06 crc kubenswrapper[4792]: I1202 18:37:06.055603 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 18:37:06 crc kubenswrapper[4792]: I1202 18:37:06.055614 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:06Z","lastTransitionTime":"2025-12-02T18:37:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 18:37:06 crc kubenswrapper[4792]: I1202 18:37:06.539743 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 18:37:06 crc kubenswrapper[4792]: I1202 18:37:06.539828 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 18:37:06 crc kubenswrapper[4792]: I1202 18:37:06.539793 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 18:37:06 crc kubenswrapper[4792]: E1202 18:37:06.539967 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 18:37:06 crc kubenswrapper[4792]: E1202 18:37:06.540298 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 18:37:06 crc kubenswrapper[4792]: E1202 18:37:06.540435 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 18:37:06 crc kubenswrapper[4792]: I1202 18:37:06.541845 4792 scope.go:117] "RemoveContainer" containerID="7774ef77b87138ed3e8ed79d14716c2def47a9b30828fd996641b33c3db6c81c"
Dec 02 18:37:06 crc kubenswrapper[4792]: E1202 18:37:06.542102 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-4jhb5_openshift-ovn-kubernetes(2f79c130-fb71-4e1c-9e2d-ef492a0acb04)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04"
Dec 02 18:37:07 crc kubenswrapper[4792]: I1202 18:37:07.087509 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 18:37:07 crc kubenswrapper[4792]: I1202 18:37:07.087574 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 18:37:07 crc kubenswrapper[4792]: I1202 18:37:07.087586 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 18:37:07 crc kubenswrapper[4792]: I1202 18:37:07.087604 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 18:37:07 crc kubenswrapper[4792]: I1202 18:37:07.087616 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:07Z","lastTransitionTime":"2025-12-02T18:37:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 18:37:07 crc kubenswrapper[4792]: I1202 18:37:07.539627 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m"
Dec 02 18:37:07 crc kubenswrapper[4792]: E1202 18:37:07.539978 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52"
Dec 02 18:37:08 crc kubenswrapper[4792]: I1202 18:37:08.018851 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 18:37:08 crc kubenswrapper[4792]: I1202 18:37:08.019228 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 18:37:08 crc kubenswrapper[4792]: I1202 18:37:08.019371 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 18:37:08 crc kubenswrapper[4792]: I1202 18:37:08.019511 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 18:37:08 crc kubenswrapper[4792]: I1202 18:37:08.019693 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:08Z","lastTransitionTime":"2025-12-02T18:37:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Has your network provider started?"} Dec 02 18:37:08 crc kubenswrapper[4792]: I1202 18:37:08.431742 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:08 crc kubenswrapper[4792]: I1202 18:37:08.431835 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:08 crc kubenswrapper[4792]: I1202 18:37:08.431858 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:08 crc kubenswrapper[4792]: I1202 18:37:08.431896 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:08 crc kubenswrapper[4792]: I1202 18:37:08.431922 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:08Z","lastTransitionTime":"2025-12-02T18:37:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:08 crc kubenswrapper[4792]: I1202 18:37:08.534654 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:08 crc kubenswrapper[4792]: I1202 18:37:08.534683 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:08 crc kubenswrapper[4792]: I1202 18:37:08.534692 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:08 crc kubenswrapper[4792]: I1202 18:37:08.534707 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:08 crc kubenswrapper[4792]: I1202 18:37:08.534716 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:08Z","lastTransitionTime":"2025-12-02T18:37:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:08 crc kubenswrapper[4792]: I1202 18:37:08.539148 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:37:08 crc kubenswrapper[4792]: E1202 18:37:08.539238 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:37:08 crc kubenswrapper[4792]: I1202 18:37:08.539248 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:37:08 crc kubenswrapper[4792]: I1202 18:37:08.539403 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:37:08 crc kubenswrapper[4792]: E1202 18:37:08.539452 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:37:08 crc kubenswrapper[4792]: E1202 18:37:08.539661 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:37:08 crc kubenswrapper[4792]: I1202 18:37:08.637211 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:08 crc kubenswrapper[4792]: I1202 18:37:08.637249 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:08 crc kubenswrapper[4792]: I1202 18:37:08.637258 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:08 crc kubenswrapper[4792]: I1202 18:37:08.637274 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:08 crc kubenswrapper[4792]: I1202 18:37:08.637283 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:08Z","lastTransitionTime":"2025-12-02T18:37:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:08 crc kubenswrapper[4792]: I1202 18:37:08.739406 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:08 crc kubenswrapper[4792]: I1202 18:37:08.739491 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:08 crc kubenswrapper[4792]: I1202 18:37:08.739564 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:08 crc kubenswrapper[4792]: I1202 18:37:08.739663 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:08 crc kubenswrapper[4792]: I1202 18:37:08.740164 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:08Z","lastTransitionTime":"2025-12-02T18:37:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:08 crc kubenswrapper[4792]: I1202 18:37:08.843329 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:08 crc kubenswrapper[4792]: I1202 18:37:08.843394 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:08 crc kubenswrapper[4792]: I1202 18:37:08.843411 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:08 crc kubenswrapper[4792]: I1202 18:37:08.843436 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:08 crc kubenswrapper[4792]: I1202 18:37:08.843452 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:08Z","lastTransitionTime":"2025-12-02T18:37:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:08 crc kubenswrapper[4792]: I1202 18:37:08.945665 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:08 crc kubenswrapper[4792]: I1202 18:37:08.945715 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:08 crc kubenswrapper[4792]: I1202 18:37:08.945731 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:08 crc kubenswrapper[4792]: I1202 18:37:08.945759 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:08 crc kubenswrapper[4792]: I1202 18:37:08.945780 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:08Z","lastTransitionTime":"2025-12-02T18:37:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.048084 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.048155 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.048173 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.048196 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.048213 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:09Z","lastTransitionTime":"2025-12-02T18:37:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.152235 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.152299 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.152315 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.152338 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.152355 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:09Z","lastTransitionTime":"2025-12-02T18:37:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.256407 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.256481 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.256501 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.256567 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.256592 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:09Z","lastTransitionTime":"2025-12-02T18:37:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.359358 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.359411 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.359427 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.359448 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.359465 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:09Z","lastTransitionTime":"2025-12-02T18:37:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.462698 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.462755 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.462773 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.462795 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.462812 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:09Z","lastTransitionTime":"2025-12-02T18:37:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.539729 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:37:09 crc kubenswrapper[4792]: E1202 18:37:09.540166 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.562990 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9668059a-1772-400b-b2ad-86aa3d306dd9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2153e3d5a1b277988329aaab097b2fdb9955c4d79382b08ff93827ad69b629a1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T18:36:27Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 18:36:22.337074 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 18:36:22.338216 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179137523/tls.crt::/tmp/serving-cert-2179137523/tls.key\\\\\\\"\\\\nI1202 18:36:27.868924 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 18:36:27.874780 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 18:36:27.874832 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 18:36:27.874861 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 18:36:27.874872 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 18:36:27.886080 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1202 18:36:27.886174 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 18:36:27.886187 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886221 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 18:36:27.886250 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 18:36:27.886257 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 18:36:27.886264 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 18:36:27.889226 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:09Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.569564 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.569662 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.569694 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.569740 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.569778 4792 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:09Z","lastTransitionTime":"2025-12-02T18:37:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.585461 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:09Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.603793 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nvcwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2adea4f-f2e8-4311-8d3b-e720e68530eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f39043dbb211b12cfe20e988fa0b943cefec51e7d2927e94d1cbb9b56c999038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgfcx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93a7fa91b532301637883d482e698574ab677db991d76698cffefb28f4e8b9b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":
true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgfcx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nvcwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:09Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.619868 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:09Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.633727 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5777c8afe286becbd127ff2f1cc7617adff9e95a8e4061f433134bf749fa89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:09Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.656786 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8537808b344a3fb20a07a25558069d571f132b5a1004781b9efc0ed4d76de64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb24f25d6ddc35bcd113c137145f60606cbe656c7561eab446a5d8f3b9f5c8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:09Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.673874 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.673977 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.674037 4792 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.674060 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.674079 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:09Z","lastTransitionTime":"2025-12-02T18:37:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.678212 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d1917fa-6107-4248-a5a8-a868f9db2814\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866\\\",\\\"image\\\":\\\
"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:09Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.692486 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d6409c0d-877b-444d-a794-4ab8255b9099\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62cbd6fd0a3a47c0baa3bc776b5a2e2ba638de1954d203b7111ec77f3106d32e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a81a880a60fc5626604b7c76035a053fccf22cd190fafdda123fe503d891a21\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d8ef45a3f94d51a7c9260ccd53fa15a9790303e7f7de95a1b166384fca992b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://766ea0daaf38f994a9e8ec7d4e8c872f667ddb4e651aa3955cc08139be85842d\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://766ea0daaf38f994a9e8ec7d4e8c872f667ddb4e651aa3955cc08139be85842d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:09Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.704802 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:09Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.721200 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63bfb707ccc4df1bf37c121b2c44c2e74bbd4c0ba5daec8a6b5e9b0d91c0f6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:09Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.738422 4792 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc533e72d865a8b2ece2e1b3ac15c56f873c2370c482ec0d1b683b343733cb06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wpdh4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:09Z is after 2025-08-24T17:21:41Z" Dec 02 
18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.768164 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04020010567f8dc6a1924f2aa5b0a2
61e83810e0639500aea5a570826c99efdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha
256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7774ef77b87138ed3e8ed79d14716c2def47a9b30828fd996641b33c3db6c81c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7774ef77b87138ed3e8ed79d14716c2def47a9b30828fd996641b33c3db6c81c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T18:36:53Z\\\",\\\"message\\\":\\\" ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:52Z is after 2025-08-24T17:21:41Z]\\\\nI1202 18:36:52.795313 6429 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:default/kubernetes]} name:Service_default/kubernetes_TCP_node_router_crc options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.1:443:169.254.0.2:6443]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {4de\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-4jhb5_openshift-ovn-kubernetes(2f79c130-fb71-4e1c-9e2d-ef492a0acb04)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4jhb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:09Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.776793 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.777030 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.777206 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.777369 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.777588 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:09Z","lastTransitionTime":"2025-12-02T18:37:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.785509 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l7jxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da09a4ee-4e22-4396-a352-7bcb2b89db73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e9641526f236df1982b46cbfc0583af3bc551c9ddff5bdf2006a5d6c075307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nqc7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l7jxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:09Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.808054 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69235e1-c1ab-41e3-af2c-14b956c6c37b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddaff8d1d17ef199fd8e33d481a528de305f885915e7db4033dc0e64a94f669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l66ss\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:09Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.827090 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dw25w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6925e194-2dc8-4a3a-aa76-8db41ff27997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9e65c94ca8016f1dfcde2fc1262f2f78f150ada4796f7f2607221921bb3b805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tjt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dw25w\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:09Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.843494 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qrlwg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85a6200e-64b5-4e6b-bd19-933e3b576bfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3415f5b847f72aad5d88625823e8241334b103ab38a6450f9118b46058e96b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jt4k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qrlwg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:09Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.861022 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2ls4m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"778806a7-7e6f-4776-8233-b42b296ebc52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcjpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcjpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2ls4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:09Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.881092 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.881167 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.881184 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.881214 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.881232 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:09Z","lastTransitionTime":"2025-12-02T18:37:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.983254 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.983327 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.983345 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.983373 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:09 crc kubenswrapper[4792]: I1202 18:37:09.983391 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:09Z","lastTransitionTime":"2025-12-02T18:37:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.086971 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.087029 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.087049 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.087077 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.087097 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:10Z","lastTransitionTime":"2025-12-02T18:37:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.189962 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.190014 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.190032 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.190056 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.190073 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:10Z","lastTransitionTime":"2025-12-02T18:37:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.293479 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.293595 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.293623 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.293655 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.293678 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:10Z","lastTransitionTime":"2025-12-02T18:37:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.396190 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.396247 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.396259 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.396281 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.396296 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:10Z","lastTransitionTime":"2025-12-02T18:37:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.499109 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.499189 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.499214 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.499247 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.499270 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:10Z","lastTransitionTime":"2025-12-02T18:37:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.539587 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:37:10 crc kubenswrapper[4792]: E1202 18:37:10.539751 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.539817 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:37:10 crc kubenswrapper[4792]: E1202 18:37:10.539991 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.540146 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:37:10 crc kubenswrapper[4792]: E1202 18:37:10.540399 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.608786 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.608865 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.608886 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.608914 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.608934 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:10Z","lastTransitionTime":"2025-12-02T18:37:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.711878 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.711928 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.711937 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.711952 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.711961 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:10Z","lastTransitionTime":"2025-12-02T18:37:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.815514 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.815604 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.815623 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.815649 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.815666 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:10Z","lastTransitionTime":"2025-12-02T18:37:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.919447 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.919600 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.919624 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.919654 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:10 crc kubenswrapper[4792]: I1202 18:37:10.919675 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:10Z","lastTransitionTime":"2025-12-02T18:37:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.021471 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.021516 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.021555 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.021575 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.021587 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:11Z","lastTransitionTime":"2025-12-02T18:37:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.124844 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.124882 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.124891 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.124905 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.124914 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:11Z","lastTransitionTime":"2025-12-02T18:37:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.228399 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.228485 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.228508 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.228563 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.228582 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:11Z","lastTransitionTime":"2025-12-02T18:37:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.333021 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.333083 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.333099 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.333121 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.333142 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:11Z","lastTransitionTime":"2025-12-02T18:37:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.435328 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.435371 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.435382 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.435397 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.435408 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:11Z","lastTransitionTime":"2025-12-02T18:37:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.538255 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.538311 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.538324 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.538344 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.538358 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:11Z","lastTransitionTime":"2025-12-02T18:37:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.538842 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:37:11 crc kubenswrapper[4792]: E1202 18:37:11.538973 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52" Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.640809 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.640889 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.640907 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.640937 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.640957 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:11Z","lastTransitionTime":"2025-12-02T18:37:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.743468 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.743543 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.743557 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.743575 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.743586 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:11Z","lastTransitionTime":"2025-12-02T18:37:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.847603 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.847660 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.847678 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.847706 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.847726 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:11Z","lastTransitionTime":"2025-12-02T18:37:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.951004 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.951072 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.951098 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.951131 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:11 crc kubenswrapper[4792]: I1202 18:37:11.951153 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:11Z","lastTransitionTime":"2025-12-02T18:37:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.005946 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.006018 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.006041 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.006075 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.006102 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:12Z","lastTransitionTime":"2025-12-02T18:37:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:12 crc kubenswrapper[4792]: E1202 18:37:12.027379 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1ed8f756-400f-4462-b5a1-c3a97e79306e\\\",\\\"systemUUID\\\":\\\"4d48cf4e-c99d-43e6-acd7-ad269c0425b2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:12Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.032767 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.032803 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.032818 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.032842 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.032863 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:12Z","lastTransitionTime":"2025-12-02T18:37:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:12 crc kubenswrapper[4792]: E1202 18:37:12.049015 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.054004 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.054035 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.054048 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.054070 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.054085 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:12Z","lastTransitionTime":"2025-12-02T18:37:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:12 crc kubenswrapper[4792]: E1202 18:37:12.073209 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.077183 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.077222 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.077233 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.077246 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.077255 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:12Z","lastTransitionTime":"2025-12-02T18:37:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:12 crc kubenswrapper[4792]: E1202 18:37:12.090740 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1ed8f756-400f-4462-b5a1-c3a97e79306e\\\",\\\"systemUUID\\\":\\\"4d48cf4e-c99d-43e6-acd7-ad269c0425b2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:12Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.102760 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.102830 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.102853 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.102901 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.102924 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:12Z","lastTransitionTime":"2025-12-02T18:37:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:12 crc kubenswrapper[4792]: E1202 18:37:12.128139 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1ed8f756-400f-4462-b5a1-c3a97e79306e\\\",\\\"systemUUID\\\":\\\"4d48cf4e-c99d-43e6-acd7-ad269c0425b2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:12Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:12 crc kubenswrapper[4792]: E1202 18:37:12.128411 4792 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.132608 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.132665 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.132685 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.132714 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.132734 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:12Z","lastTransitionTime":"2025-12-02T18:37:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.236415 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.236487 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.236509 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.236578 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.236601 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:12Z","lastTransitionTime":"2025-12-02T18:37:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.340168 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.340237 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.340256 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.340287 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.340309 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:12Z","lastTransitionTime":"2025-12-02T18:37:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.443765 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.443815 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.443826 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.443842 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.443854 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:12Z","lastTransitionTime":"2025-12-02T18:37:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.539061 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.539151 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:37:12 crc kubenswrapper[4792]: E1202 18:37:12.539225 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.539061 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:37:12 crc kubenswrapper[4792]: E1202 18:37:12.539298 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:37:12 crc kubenswrapper[4792]: E1202 18:37:12.539448 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.546160 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.546197 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.546210 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.546224 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.546235 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:12Z","lastTransitionTime":"2025-12-02T18:37:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.647849 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.647902 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.647915 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.647932 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.647948 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:12Z","lastTransitionTime":"2025-12-02T18:37:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.751227 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.751292 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.751310 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.751335 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.751352 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:12Z","lastTransitionTime":"2025-12-02T18:37:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.853486 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.853602 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.853624 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.853648 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.853665 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:12Z","lastTransitionTime":"2025-12-02T18:37:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.956472 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.956616 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.956640 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.956664 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:12 crc kubenswrapper[4792]: I1202 18:37:12.956680 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:12Z","lastTransitionTime":"2025-12-02T18:37:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.059026 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.059066 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.059076 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.059091 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.059101 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:13Z","lastTransitionTime":"2025-12-02T18:37:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.161792 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.161827 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.161835 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.161849 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.161858 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:13Z","lastTransitionTime":"2025-12-02T18:37:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.265211 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.265274 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.265295 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.265323 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.265344 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:13Z","lastTransitionTime":"2025-12-02T18:37:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.368948 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.369020 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.369038 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.369065 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.369084 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:13Z","lastTransitionTime":"2025-12-02T18:37:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.471791 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.471853 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.471869 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.471892 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.471907 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:13Z","lastTransitionTime":"2025-12-02T18:37:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.539286 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:37:13 crc kubenswrapper[4792]: E1202 18:37:13.539587 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52" Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.575888 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.575972 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.575995 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.576024 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.576070 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:13Z","lastTransitionTime":"2025-12-02T18:37:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.679673 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.679712 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.679720 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.679737 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.679748 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:13Z","lastTransitionTime":"2025-12-02T18:37:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.782804 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.782849 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.782859 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.782876 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.782887 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:13Z","lastTransitionTime":"2025-12-02T18:37:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.885681 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.885720 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.885728 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.885743 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.885753 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:13Z","lastTransitionTime":"2025-12-02T18:37:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.988348 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.988394 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.988407 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.988425 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:13 crc kubenswrapper[4792]: I1202 18:37:13.988438 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:13Z","lastTransitionTime":"2025-12-02T18:37:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.091961 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.092014 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.092025 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.092046 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.092057 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:14Z","lastTransitionTime":"2025-12-02T18:37:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.194590 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.194648 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.194667 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.194689 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.194708 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:14Z","lastTransitionTime":"2025-12-02T18:37:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.297463 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.297575 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.297600 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.297631 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.297663 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:14Z","lastTransitionTime":"2025-12-02T18:37:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.400309 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.400383 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.400406 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.400436 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.400458 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:14Z","lastTransitionTime":"2025-12-02T18:37:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.503356 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.503451 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.503471 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.503494 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.503510 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:14Z","lastTransitionTime":"2025-12-02T18:37:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.539027 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.539084 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.539049 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:37:14 crc kubenswrapper[4792]: E1202 18:37:14.539229 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:37:14 crc kubenswrapper[4792]: E1202 18:37:14.539363 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:37:14 crc kubenswrapper[4792]: E1202 18:37:14.539509 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.606452 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.606562 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.606576 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.606601 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.606636 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:14Z","lastTransitionTime":"2025-12-02T18:37:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.709959 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.710301 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.710411 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.710513 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.710657 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:14Z","lastTransitionTime":"2025-12-02T18:37:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.813290 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.813325 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.813335 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.813348 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.813359 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:14Z","lastTransitionTime":"2025-12-02T18:37:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.915911 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.916318 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.916388 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.916458 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:14 crc kubenswrapper[4792]: I1202 18:37:14.916550 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:14Z","lastTransitionTime":"2025-12-02T18:37:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.018882 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.019137 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.019276 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.019407 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.019568 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:15Z","lastTransitionTime":"2025-12-02T18:37:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.121911 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.122127 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.122250 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.122379 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.122500 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:15Z","lastTransitionTime":"2025-12-02T18:37:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.225383 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.225429 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.225441 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.225459 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.225471 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:15Z","lastTransitionTime":"2025-12-02T18:37:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.328777 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.328816 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.328824 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.328841 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.328849 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:15Z","lastTransitionTime":"2025-12-02T18:37:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.431853 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.431921 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.431943 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.431970 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.431988 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:15Z","lastTransitionTime":"2025-12-02T18:37:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.534412 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.534465 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.534482 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.534506 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.534619 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:15Z","lastTransitionTime":"2025-12-02T18:37:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.539142 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:37:15 crc kubenswrapper[4792]: E1202 18:37:15.539312 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52" Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.636999 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.637180 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.637275 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.637365 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.637444 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:15Z","lastTransitionTime":"2025-12-02T18:37:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.642474 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/778806a7-7e6f-4776-8233-b42b296ebc52-metrics-certs\") pod \"network-metrics-daemon-2ls4m\" (UID: \"778806a7-7e6f-4776-8233-b42b296ebc52\") " pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:37:15 crc kubenswrapper[4792]: E1202 18:37:15.642677 4792 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 18:37:15 crc kubenswrapper[4792]: E1202 18:37:15.642738 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/778806a7-7e6f-4776-8233-b42b296ebc52-metrics-certs podName:778806a7-7e6f-4776-8233-b42b296ebc52 nodeName:}" failed. No retries permitted until 2025-12-02 18:37:47.642719142 +0000 UTC m=+98.415611470 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/778806a7-7e6f-4776-8233-b42b296ebc52-metrics-certs") pod "network-metrics-daemon-2ls4m" (UID: "778806a7-7e6f-4776-8233-b42b296ebc52") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.739813 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.739859 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.739868 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.739884 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.739896 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:15Z","lastTransitionTime":"2025-12-02T18:37:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.841796 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.841850 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.841864 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.841881 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.841895 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:15Z","lastTransitionTime":"2025-12-02T18:37:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.944785 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.944831 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.944839 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.944855 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:15 crc kubenswrapper[4792]: I1202 18:37:15.944865 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:15Z","lastTransitionTime":"2025-12-02T18:37:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.017906 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-dw25w_6925e194-2dc8-4a3a-aa76-8db41ff27997/kube-multus/0.log" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.018345 4792 generic.go:334] "Generic (PLEG): container finished" podID="6925e194-2dc8-4a3a-aa76-8db41ff27997" containerID="c9e65c94ca8016f1dfcde2fc1262f2f78f150ada4796f7f2607221921bb3b805" exitCode=1 Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.018468 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-dw25w" event={"ID":"6925e194-2dc8-4a3a-aa76-8db41ff27997","Type":"ContainerDied","Data":"c9e65c94ca8016f1dfcde2fc1262f2f78f150ada4796f7f2607221921bb3b805"} Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.019129 4792 scope.go:117] "RemoveContainer" containerID="c9e65c94ca8016f1dfcde2fc1262f2f78f150ada4796f7f2607221921bb3b805" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.151959 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.152281 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.152290 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.152304 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.152314 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:16Z","lastTransitionTime":"2025-12-02T18:37:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.161252 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:16Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.175435 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5777c8afe286becbd127ff2f1cc7617adff9e95a8e4061f433134bf749fa89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:16Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.188173 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8537808b344a3fb20a07a25558069d571f132b5a1004781b9efc0ed4d76de64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb24f25d6ddc35bcd113c137145f60606cbe656c7561eab446a5d8f3b9f5c8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:16Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.206186 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7774ef77b87138ed3e8ed79d14716c2def47a9b30828fd996641b33c3db6c81c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7774ef77b87138ed3e8ed79d14716c2def47a9b30828fd996641b33c3db6c81c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T18:36:53Z\\\",\\\"message\\\":\\\" ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:52Z is after 2025-08-24T17:21:41Z]\\\\nI1202 18:36:52.795313 6429 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:default/kubernetes]} name:Service_default/kubernetes_TCP_node_router_crc options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.1:443:169.254.0.2:6443]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {4de\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-4jhb5_openshift-ovn-kubernetes(2f79c130-fb71-4e1c-9e2d-ef492a0acb04)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4jhb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:16Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.219277 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d1917fa-6107-4248-a5a8-a868f9db2814\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:16Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.233642 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d6409c0d-877b-444d-a794-4ab8255b9099\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62cbd6fd0a3a47c0baa3bc776b5a2e2ba638de1954d203b7111ec77f3106d32e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a81a880a60fc5626604b7c76035a053fccf22cd190fafdda123fe503d891a21\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d8ef45a3f94d51a7c9260ccd53fa15a9790303e7f7de95a1b166384fca992b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://766ea0daaf38f994a9e8ec7d4e8c872f667ddb4e651aa3955cc08139be85842d\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://766ea0daaf38f994a9e8ec7d4e8c872f667ddb4e651aa3955cc08139be85842d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:16Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.244868 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:16Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.254275 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.254324 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.254336 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.254353 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.254365 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:16Z","lastTransitionTime":"2025-12-02T18:37:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.255210 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63bfb707ccc4df1bf37c121b2c44c2e74bbd4c0ba5daec8a6b5e9b0d91c0f6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:16Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.270683 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc533e72d865a8b2ece2e1b3ac15c56f873c2370c482ec0d1b683b343733cb06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wpdh4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:16Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.280050 4792 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-2ls4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"778806a7-7e6f-4776-8233-b42b296ebc52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcjpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcjpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2ls4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:16Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.292256 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l7jxh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"da09a4ee-4e22-4396-a352-7bcb2b89db73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e9641526f236df1982b46cbfc0583af3bc551c9ddff5bdf2006a5d6c075307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nqc7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l7jxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:16Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.306551 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69235e1-c1ab-41e3-af2c-14b956c6c37b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddaff8d1d17ef199fd8e33d481a528de305f885915e7db4033dc0e64a94f669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l66ss\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:16Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.320032 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dw25w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6925e194-2dc8-4a3a-aa76-8db41ff27997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9e65c94ca8016f1dfcde2fc1262f2f78f150ada4796f7f2607221921bb3b805\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9e65c94ca8016f1dfcde2fc1262f2f78f150ada4796f7f2607221921bb3b805\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T18:37:15Z\\\",\\\"message\\\":\\\"2025-12-02T18:36:30+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_b44dcf89-3d61-4fbd-8ed1-5b2337cb0497\\\\n2025-12-02T18:36:30+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_b44dcf89-3d61-4fbd-8ed1-5b2337cb0497 to /host/opt/cni/bin/\\\\n2025-12-02T18:36:30Z [verbose] multus-daemon started\\\\n2025-12-02T18:36:30Z [verbose] Readiness Indicator file check\\\\n2025-12-02T18:37:15Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tjt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dw25w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:16Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.330945 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qrlwg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"85a6200e-64b5-4e6b-bd19-933e3b576bfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3415f5b847f72aad5d88625823e8241334b103ab38a6450f9118b46058e96b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jt4k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qrlwg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:16Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.343143 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9668059a-1772-400b-b2ad-86aa3d306dd9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2153e3d5a1b277988329aaab097b2fdb9955c4d79382b08ff93827ad69b629a1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T18:36:27Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 18:36:22.337074 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 18:36:22.338216 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179137523/tls.crt::/tmp/serving-cert-2179137523/tls.key\\\\\\\"\\\\nI1202 18:36:27.868924 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 18:36:27.874780 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 18:36:27.874832 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 18:36:27.874861 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 18:36:27.874872 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 18:36:27.886080 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1202 18:36:27.886174 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 18:36:27.886187 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886221 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 18:36:27.886250 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 18:36:27.886257 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 18:36:27.886264 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 18:36:27.889226 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:16Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.357086 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.357127 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.357143 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.357160 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.357173 4792 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:16Z","lastTransitionTime":"2025-12-02T18:37:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.358283 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:16Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.369787 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nvcwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2adea4f-f2e8-4311-8d3b-e720e68530eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f39043dbb211b12cfe20e988fa0b943cefec51e7d2927e94d1cbb9b56c999038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgfcx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93a7fa91b532301637883d482e698574ab677db991d76698cffefb28f4e8b9b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":
true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgfcx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nvcwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:16Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.459346 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.459380 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.459391 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.459408 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.459421 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:16Z","lastTransitionTime":"2025-12-02T18:37:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.538690 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:37:16 crc kubenswrapper[4792]: E1202 18:37:16.538845 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.539070 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:37:16 crc kubenswrapper[4792]: E1202 18:37:16.539136 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.539274 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:37:16 crc kubenswrapper[4792]: E1202 18:37:16.539374 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.561795 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.561825 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.561836 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.561850 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.561862 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:16Z","lastTransitionTime":"2025-12-02T18:37:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.664656 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.664684 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.664693 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.664705 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.664715 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:16Z","lastTransitionTime":"2025-12-02T18:37:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.767159 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.767191 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.767201 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.767216 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.767228 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:16Z","lastTransitionTime":"2025-12-02T18:37:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.869821 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.869853 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.869862 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.869882 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.869894 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:16Z","lastTransitionTime":"2025-12-02T18:37:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.972217 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.972265 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.972277 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.972294 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:16 crc kubenswrapper[4792]: I1202 18:37:16.972307 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:16Z","lastTransitionTime":"2025-12-02T18:37:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.023948 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-dw25w_6925e194-2dc8-4a3a-aa76-8db41ff27997/kube-multus/0.log" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.024005 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-dw25w" event={"ID":"6925e194-2dc8-4a3a-aa76-8db41ff27997","Type":"ContainerStarted","Data":"58258d39cb6d0616429a94e2f3542d2250c8e8a89d393cf5667aa0b877d03797"} Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.038062 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:17Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.075363 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.075393 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.075401 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.075414 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.075425 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:17Z","lastTransitionTime":"2025-12-02T18:37:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.077127 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nvcwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2adea4f-f2e8-4311-8d3b-e720e68530eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f39043dbb211b12cfe20e988fa0b943cefec51e7d2927e94d1cbb9b56c999038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgfcx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93a7fa91b532301637883d482e698574ab677db991d76698cffefb28f4e8b9b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgfcx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nvcwt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:17Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.106446 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9668059a-1772-400b-b2ad-86aa3d306dd9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2153e3d5a1b277988329aaab097b2fdb9955c4d79382b08ff93827ad69b629a1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T18:36:27Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 18:36:22.337074 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 18:36:22.338216 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179137523/tls.crt::/tmp/serving-cert-2179137523/tls.key\\\\\\\"\\\\nI1202 18:36:27.868924 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 18:36:27.874780 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 18:36:27.874832 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 18:36:27.874861 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 18:36:27.874872 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 18:36:27.886080 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1202 18:36:27.886174 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 18:36:27.886187 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886221 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 18:36:27.886250 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 18:36:27.886257 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 18:36:27.886264 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 18:36:27.889226 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:17Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.122550 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:17Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.135834 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5777c8afe286becbd127ff2f1cc7617adff9e95a8e4061f433134bf749fa89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:17Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.149813 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8537808b344a3fb20a07a25558069d571f132b5a1004781b9efc0ed4d76de64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb24f25d6ddc35bcd113c137145f60606cbe656c7561eab446a5d8f3b9f5c8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:17Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.163811 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d6409c0d-877b-444d-a794-4ab8255b9099\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62cbd6fd0a3a47c0baa3bc776b5a2e2ba638de1954d203b7111ec77f3106d32e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a81a880a60fc5626604b7c76035a053fccf22cd190fafdda123fe503d891a21\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d8ef45a3f94d51a7c9260ccd53fa15a9790303e7f7de95a1b166384fca992b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://766ea0daaf38f994a9e8ec7d4e8c872f667ddb4e651aa3955cc08139be85842d\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://766ea0daaf38f994a9e8ec7d4e8c872f667ddb4e651aa3955cc08139be85842d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:17Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.177002 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.177037 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.177048 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.177066 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.177077 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:17Z","lastTransitionTime":"2025-12-02T18:37:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.178546 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:17Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.191620 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63bfb707ccc4df1bf37c121b2c44c2e74bbd4c0ba5daec8a6b5e9b0d91c0f6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:17Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.205471 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc533e72d865a8b2ece2e1b3ac15c56f873c2370c482ec0d1b683b343733cb06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wpdh4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:17Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.228139 4792 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7774ef77b87138ed3e8ed79d14716c2def47a9b30828fd996641b33c3db6c81c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7774ef77b87138ed3e8ed79d14716c2def47a9b30828fd996641b33c3db6c81c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T18:36:53Z\\\",\\\"message\\\":\\\" ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:52Z is after 2025-08-24T17:21:41Z]\\\\nI1202 18:36:52.795313 6429 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:default/kubernetes]} name:Service_default/kubernetes_TCP_node_router_crc options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.1:443:169.254.0.2:6443]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {4de\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-4jhb5_openshift-ovn-kubernetes(2f79c130-fb71-4e1c-9e2d-ef492a0acb04)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4jhb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:17Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.242427 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d1917fa-6107-4248-a5a8-a868f9db2814\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:17Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.251656 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l7jxh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"da09a4ee-4e22-4396-a352-7bcb2b89db73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e9641526f236df1982b46cbfc0583af3bc551c9ddff5bdf2006a5d6c075307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nqc7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l7jxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:17Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.265853 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69235e1-c1ab-41e3-af2c-14b956c6c37b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddaff8d1d17ef199fd8e33d481a528de305f885915e7db4033dc0e64a94f669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l66ss\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:17Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.279051 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.279085 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:17 crc 
kubenswrapper[4792]: I1202 18:37:17.279094 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.279110 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.279120 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:17Z","lastTransitionTime":"2025-12-02T18:37:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.283536 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dw25w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6925e194-2dc8-4a3a-aa76-8db41ff27997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58258d39cb6d0616429a94e2f3542d2250c8e8a89d393cf5667aa0b877d03797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9e65c94ca8016f1dfcde2fc1262f2f78f150ada4796f7f2607221921bb3b805\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T18:37:15Z\\\",\\\"message\\\":\\\"2025-12-02T18:36:30+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_b44dcf89-3d61-4fbd-8ed1-5b2337cb0497\\\\n2025-12-02T18:36:30+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_b44dcf89-3d61-4fbd-8ed1-5b2337cb0497 to /host/opt/cni/bin/\\\\n2025-12-02T18:36:30Z [verbose] multus-daemon started\\\\n2025-12-02T18:36:30Z [verbose] Readiness Indicator file check\\\\n2025-12-02T18:37:15Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:37:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tjt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dw25w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:17Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.295841 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qrlwg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"85a6200e-64b5-4e6b-bd19-933e3b576bfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3415f5b847f72aad5d88625823e8241334b103ab38a6450f9118b46058e96b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jt4k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qrlwg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:17Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.307138 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2ls4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"778806a7-7e6f-4776-8233-b42b296ebc52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcjpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcjpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2ls4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:17Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.382079 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.382105 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.382114 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.382128 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.382138 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:17Z","lastTransitionTime":"2025-12-02T18:37:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.485074 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.485119 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.485132 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.485152 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.485166 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:17Z","lastTransitionTime":"2025-12-02T18:37:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.538909 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:37:17 crc kubenswrapper[4792]: E1202 18:37:17.539133 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.540246 4792 scope.go:117] "RemoveContainer" containerID="7774ef77b87138ed3e8ed79d14716c2def47a9b30828fd996641b33c3db6c81c" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.590711 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.591307 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.591357 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.591376 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.591386 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:17Z","lastTransitionTime":"2025-12-02T18:37:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.693309 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.693329 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.693336 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.693347 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.693357 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:17Z","lastTransitionTime":"2025-12-02T18:37:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.798627 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.798655 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.798663 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.798676 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.798684 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:17Z","lastTransitionTime":"2025-12-02T18:37:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.901416 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.901444 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.901452 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.901464 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:17 crc kubenswrapper[4792]: I1202 18:37:17.901473 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:17Z","lastTransitionTime":"2025-12-02T18:37:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.003402 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.003478 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.003486 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.003501 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.003509 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:18Z","lastTransitionTime":"2025-12-02T18:37:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.028449 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4jhb5_2f79c130-fb71-4e1c-9e2d-ef492a0acb04/ovnkube-controller/2.log" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.030306 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" event={"ID":"2f79c130-fb71-4e1c-9e2d-ef492a0acb04","Type":"ContainerStarted","Data":"c149ca0a128d1e33a8df95c177c80ba3a8dbb2caf877124ba7e10ff195808ad3"} Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.031188 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.045141 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d1917fa-6107-4248-a5a8-a868f9db2814\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:18Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.061229 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d6409c0d-877b-444d-a794-4ab8255b9099\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62cbd6fd0a3a47c0baa3bc776b5a2e2ba638de1954d203b7111ec77f3106d32e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a81a880a60fc5626604b7c76035a053fccf22cd190fafdda123fe503d891a21\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d8ef45a3f94d51a7c9260ccd53fa15a9790303e7f7de95a1b166384fca992b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://766ea0daaf38f994a9e8ec7d4e8c872f667ddb4e651aa3955cc08139be85842d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://766ea0daaf38f994a9e8ec7d4e8c872f667ddb4e651aa3955cc08139be85842d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:18Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.074393 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with 
unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:18Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.085562 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63bfb707ccc4df1bf37c121b2c44c2e74bbd4c0ba5daec8a6b5e9b0d91c0f6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:18Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.103043 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc533e72d865a8b2ece2e1b3ac15c56f873c2370c482ec0d1b683b343733cb06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wpdh4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:18Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.105573 4792 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.105601 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.105611 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.105627 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.105637 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:18Z","lastTransitionTime":"2025-12-02T18:37:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.121571 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c149ca0a128d1e33a8df95c177c80ba3a8dbb2ca
f877124ba7e10ff195808ad3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7774ef77b87138ed3e8ed79d14716c2def47a9b30828fd996641b33c3db6c81c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T18:36:53Z\\\",\\\"message\\\":\\\" ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:52Z is after 2025-08-24T17:21:41Z]\\\\nI1202 18:36:52.795313 6429 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:default/kubernetes]} name:Service_default/kubernetes_TCP_node_router_crc options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.1:443:169.254.0.2:6443]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == 
{4de\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:37:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"c
ontainerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4jhb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:18Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.135034 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l7jxh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"da09a4ee-4e22-4396-a352-7bcb2b89db73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e9641526f236df1982b46cbfc0583af3bc551c9ddff5bdf2006a5d6c075307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nqc7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l7jxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:18Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.150652 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69235e1-c1ab-41e3-af2c-14b956c6c37b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddaff8d1d17ef199fd8e33d481a528de305f885915e7db4033dc0e64a94f669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l66ss\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:18Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.168338 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dw25w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6925e194-2dc8-4a3a-aa76-8db41ff27997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58258d39cb6d0616429a94e2f3542d2250c8e8a89d393cf5667aa0b877d03797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9e65c94ca8016f1dfcde2fc1262f2f78f150ada4796f7f2607221921bb3b805\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T18:37:15Z\\\",\\\"message\\\":\\\"2025-12-02T18:36:30+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_b44dcf89-3d61-4fbd-8ed1-5b2337cb0497\\\\n2025-12-02T18:36:30+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_b44dcf89-3d61-4fbd-8ed1-5b2337cb0497 to /host/opt/cni/bin/\\\\n2025-12-02T18:36:30Z [verbose] multus-daemon started\\\\n2025-12-02T18:36:30Z [verbose] Readiness Indicator file check\\\\n2025-12-02T18:37:15Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:37:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tjt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dw25w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:18Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.184237 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qrlwg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"85a6200e-64b5-4e6b-bd19-933e3b576bfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3415f5b847f72aad5d88625823e8241334b103ab38a6450f9118b46058e96b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jt4k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qrlwg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:18Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.196288 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2ls4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"778806a7-7e6f-4776-8233-b42b296ebc52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcjpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcjpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2ls4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:18Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.207671 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.207702 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.207711 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.207723 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.207732 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:18Z","lastTransitionTime":"2025-12-02T18:37:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.212097 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9668059a-1772-400b-b2ad-86aa3d306dd9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2153e3d5a1b277988329aaab097b2fdb9955c4d79382b08ff93827ad69b629a1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T18:36:27Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 18:36:22.337074 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 18:36:22.338216 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179137523/tls.crt::/tmp/serving-cert-2179137523/tls.key\\\\\\\"\\\\nI1202 18:36:27.868924 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 18:36:27.874780 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 18:36:27.874832 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 18:36:27.874861 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 18:36:27.874872 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 18:36:27.886080 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1202 18:36:27.886174 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 18:36:27.886187 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886221 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 18:36:27.886250 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 18:36:27.886257 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 18:36:27.886264 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 18:36:27.889226 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:18Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.223946 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:18Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.236259 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nvcwt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2adea4f-f2e8-4311-8d3b-e720e68530eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f39043dbb211b12cfe20e988fa0b943cefec51e7d2927e94d1cbb9b56c999038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgfcx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93a7fa91b532301637883d482e698574ab677db991d76698cffefb28f4e8b9b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgfcx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nvcwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:18Z is after 2025-08-24T17:21:41Z" Dec 02 
18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.256896 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:18Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.269595 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5777c8afe286becbd127ff2f1cc7617adff9e95a8e4061f433134bf749fa89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:18Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.286932 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8537808b344a3fb20a07a25558069d571f132b5a1004781b9efc0ed4d76de64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb24f25d6ddc35bcd113c137145f60606cbe656c7561eab446a5d8f3b9f5c8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:18Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.309688 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.309735 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.309743 4792 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.309759 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.309773 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:18Z","lastTransitionTime":"2025-12-02T18:37:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.411555 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.411600 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.411611 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.411627 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.411638 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:18Z","lastTransitionTime":"2025-12-02T18:37:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.513820 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.513856 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.513869 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.513884 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.513895 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:18Z","lastTransitionTime":"2025-12-02T18:37:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.539474 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.539643 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.539779 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:37:18 crc kubenswrapper[4792]: E1202 18:37:18.539768 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:37:18 crc kubenswrapper[4792]: E1202 18:37:18.539906 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:37:18 crc kubenswrapper[4792]: E1202 18:37:18.539947 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.616243 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.616274 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.616283 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.616300 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.616313 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:18Z","lastTransitionTime":"2025-12-02T18:37:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.718985 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.719017 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.719025 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.719041 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.719051 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:18Z","lastTransitionTime":"2025-12-02T18:37:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.821556 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.821617 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.821630 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.821652 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.821665 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:18Z","lastTransitionTime":"2025-12-02T18:37:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.955494 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.955560 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.955575 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.955594 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:18 crc kubenswrapper[4792]: I1202 18:37:18.955608 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:18Z","lastTransitionTime":"2025-12-02T18:37:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.058703 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.058745 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.058753 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.058766 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.058776 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:19Z","lastTransitionTime":"2025-12-02T18:37:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.162109 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.162334 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.162397 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.162431 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.162450 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:19Z","lastTransitionTime":"2025-12-02T18:37:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.264888 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.264958 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.264977 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.265002 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.265022 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:19Z","lastTransitionTime":"2025-12-02T18:37:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.367753 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.367811 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.367829 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.367854 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.367871 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:19Z","lastTransitionTime":"2025-12-02T18:37:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.470590 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.470627 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.470636 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.470652 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.470662 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:19Z","lastTransitionTime":"2025-12-02T18:37:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.539508 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:37:19 crc kubenswrapper[4792]: E1202 18:37:19.539793 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.559159 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:19Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.571974 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63bfb707ccc4df1bf37c121b2c44c2e74bbd4c0ba5daec8a6b5e9b0d91c0f6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:19Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.573375 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.573436 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.573457 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.573482 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.573503 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:19Z","lastTransitionTime":"2025-12-02T18:37:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.590497 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc533e72d865a8b2ece2e1b3ac15c56f873c2370c482ec0d1b683b343733cb06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wpdh4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:19Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.616270 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c149ca0a128d1e33a8df95c177c80ba3a8dbb2caf877124ba7e10ff195808ad3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7774ef77b87138ed3e8ed79d14716c2def47a9b30828fd996641b33c3db6c81c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T18:36:53Z\\\",\\\"message\\\":\\\" ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:52Z is after 2025-08-24T17:21:41Z]\\\\nI1202 18:36:52.795313 6429 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:default/kubernetes]} name:Service_default/kubernetes_TCP_node_router_crc options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.1:443:169.254.0.2:6443]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == 
{4de\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:37:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"c
ontainerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4jhb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:19Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.633087 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d1917fa-6107-4248-a5a8-a868f9db2814\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:19Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.652372 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d6409c0d-877b-444d-a794-4ab8255b9099\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62cbd6fd0a3a47c0baa3bc776b5a2e2ba638de1954d203b7111ec77f3106d32e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a81a880a60fc5626604b7c76035a053fccf22cd190fafdda123fe503d891a21\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d8ef45a3f94d51a7c9260ccd53fa15a9790303e7f7de95a1b166384fca992b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://766ea0daaf38f994a9e8ec7d4e8c872f667ddb4e651aa3955cc08139be85842d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://766ea0daaf38f994a9e8ec7d4e8c872f667ddb4e651aa3955cc08139be85842d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:19Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.675821 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.675875 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.675888 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.675908 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 
18:37:19.675923 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:19Z","lastTransitionTime":"2025-12-02T18:37:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.676565 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69235e1-c1ab-41e3-af2c-14b956c6c37b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddaff8d1d17ef199fd8e33d481a528de305f885915e7db4033dc0e64a94f669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mou
ntPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCou
nt\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l66ss\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:19Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.694706 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dw25w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6925e194-2dc8-4a3a-aa76-8db41ff27997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58258d39cb6d0616429a94e2f3542d2250c8e8a89d393cf5667aa0b877d03797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9e65c94ca8016f1dfcde2fc1262f2f78f150ada4796f7f2607221921bb3b805\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T18:37:15Z\\\",\\\"message\\\":\\\"2025-12-02T18:36:30+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_b44dcf89-3d61-4fbd-8ed1-5b2337cb0497\\\\n2025-12-02T18:36:30+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_b44dcf89-3d61-4fbd-8ed1-5b2337cb0497 to /host/opt/cni/bin/\\\\n2025-12-02T18:36:30Z [verbose] multus-daemon started\\\\n2025-12-02T18:36:30Z [verbose] Readiness Indicator file check\\\\n2025-12-02T18:37:15Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:37:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tjt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dw25w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:19Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.709181 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qrlwg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"85a6200e-64b5-4e6b-bd19-933e3b576bfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3415f5b847f72aad5d88625823e8241334b103ab38a6450f9118b46058e96b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jt4k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qrlwg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:19Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.724830 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2ls4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"778806a7-7e6f-4776-8233-b42b296ebc52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcjpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcjpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2ls4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:19Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.741120 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l7jxh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"da09a4ee-4e22-4396-a352-7bcb2b89db73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e9641526f236df1982b46cbfc0583af3bc551c9ddff5bdf2006a5d6c075307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nqc7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l7jxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:19Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.757830 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nvcwt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2adea4f-f2e8-4311-8d3b-e720e68530eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f39043dbb211b12cfe20e988fa0b943cefec51e7d2927e94d1cbb9b56c999038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgfcx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93a7fa91b532301637883d482e698574ab677db991d76698cffefb28f4e8b9b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgfcx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nvcwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:19Z is after 2025-08-24T17:21:41Z" Dec 02 
18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.772148 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9668059a-1772-400b-b2ad-86aa3d306dd9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2153e3d5a1b277988329aaab097b2fdb9955c4d79382b08ff93827ad69b629a1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T18:36:27Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 18:36:22.337074 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 18:36:22.338216 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179137523/tls.crt::/tmp/serving-cert-2179137523/tls.key\\\\\\\"\\\\nI1202 18:36:27.868924 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 18:36:27.874780 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 18:36:27.874832 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 18:36:27.874861 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 18:36:27.874872 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 18:36:27.886080 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1202 18:36:27.886174 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 18:36:27.886187 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886221 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 18:36:27.886250 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 18:36:27.886257 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 18:36:27.886264 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 18:36:27.889226 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:19Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.777968 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.777993 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.778001 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.778015 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.778024 4792 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:19Z","lastTransitionTime":"2025-12-02T18:37:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.786640 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:19Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.803041 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5777c8afe286becbd127ff2f1cc7617adff9e95a8e4061f433134bf749fa89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:19Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.820625 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8537808b344a3fb20a07a25558069d571f132b5a1004781b9efc0ed4d76de64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb24f25d6ddc35bcd113c137145f60606cbe656c7561eab446a5d8f3b9f5c8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:19Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.836043 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:19Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.879902 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.879971 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.879993 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.880023 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.880046 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:19Z","lastTransitionTime":"2025-12-02T18:37:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.983267 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.983310 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.983319 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.983337 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:19 crc kubenswrapper[4792]: I1202 18:37:19.983346 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:19Z","lastTransitionTime":"2025-12-02T18:37:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.041268 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4jhb5_2f79c130-fb71-4e1c-9e2d-ef492a0acb04/ovnkube-controller/3.log" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.042691 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4jhb5_2f79c130-fb71-4e1c-9e2d-ef492a0acb04/ovnkube-controller/2.log" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.046536 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" event={"ID":"2f79c130-fb71-4e1c-9e2d-ef492a0acb04","Type":"ContainerDied","Data":"c149ca0a128d1e33a8df95c177c80ba3a8dbb2caf877124ba7e10ff195808ad3"} Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.046614 4792 scope.go:117] "RemoveContainer" containerID="7774ef77b87138ed3e8ed79d14716c2def47a9b30828fd996641b33c3db6c81c" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.046939 4792 generic.go:334] "Generic (PLEG): container finished" podID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerID="c149ca0a128d1e33a8df95c177c80ba3a8dbb2caf877124ba7e10ff195808ad3" exitCode=1 Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.047780 4792 scope.go:117] "RemoveContainer" containerID="c149ca0a128d1e33a8df95c177c80ba3a8dbb2caf877124ba7e10ff195808ad3" Dec 02 18:37:20 crc kubenswrapper[4792]: E1202 18:37:20.047968 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-4jhb5_openshift-ovn-kubernetes(2f79c130-fb71-4e1c-9e2d-ef492a0acb04)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.068482 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dw25w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6925e194-2dc8-4a3a-aa76-8db41ff27997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58258d39cb6d0616429a94e2f3542d2250c8e8a89d393cf5667aa0b877d03797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9e65c94ca8016f1dfcde2fc1262f2f78f150ada4796f7f2607221921bb3b805\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T18:37:15Z\\\",\\\"message\\\":\\\"2025-12-02T18:36:30+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_b44dcf89-3d61-4fbd-8ed1-5b2337cb0497\\\\n2025-12-02T18:36:30+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_b44dcf89-3d61-4fbd-8ed1-5b2337cb0497 to /host/opt/cni/bin/\\\\n2025-12-02T18:36:30Z [verbose] multus-daemon started\\\\n2025-12-02T18:36:30Z [verbose] Readiness Indicator file check\\\\n2025-12-02T18:37:15Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:37:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tjt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dw25w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:20Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.085724 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qrlwg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"85a6200e-64b5-4e6b-bd19-933e3b576bfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3415f5b847f72aad5d88625823e8241334b103ab38a6450f9118b46058e96b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jt4k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qrlwg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:20Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.086417 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.086489 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.086647 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.086701 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.086718 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:20Z","lastTransitionTime":"2025-12-02T18:37:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.102654 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2ls4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"778806a7-7e6f-4776-8233-b42b296ebc52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcjpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcjpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2ls4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:20Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.117196 4792 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-dns/node-resolver-l7jxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da09a4ee-4e22-4396-a352-7bcb2b89db73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e9641526f236df1982b46cbfc0583af3bc551c9ddff5bdf2006a5d6c075307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nqc7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l7jxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:20Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.139845 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69235e1-c1ab-41e3-af2c-14b956c6c37b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddaff8d1d17ef199fd8e33d481a528de305f885915e7db4033dc0e64a94f669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l66ss\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:20Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.158658 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9668059a-1772-400b-b2ad-86aa3d306dd9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2153e3d5a1b277988329aaab097b2fdb9955c4d79382b08ff93827ad69b629a1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T18:36:27Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 18:36:22.337074 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 18:36:22.338216 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179137523/tls.crt::/tmp/serving-cert-2179137523/tls.key\\\\\\\"\\\\nI1202 18:36:27.868924 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 18:36:27.874780 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 18:36:27.874832 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 18:36:27.874861 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 18:36:27.874872 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 18:36:27.886080 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1202 18:36:27.886174 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 18:36:27.886187 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886221 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 18:36:27.886250 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 18:36:27.886257 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 18:36:27.886264 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 18:36:27.889226 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:20Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.181928 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:20Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.189086 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.189115 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.189125 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.189140 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.189149 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:20Z","lastTransitionTime":"2025-12-02T18:37:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.204408 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nvcwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2adea4f-f2e8-4311-8d3b-e720e68530eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f39043dbb211b12cfe20e988fa0b943cefec51e7d2927e94d1cbb9b56c999038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgfcx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93a7fa91b532301637883d482e698574ab677db991d76698cffefb28f4e8b9b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgfcx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nvcwt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:20Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.217604 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8537808b344a3fb20a07a25558069d571f132b5a1004781b9efc0ed4d76de64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb24f25d6ddc35bcd113c137145f60606cbe656c7561eab446a5d8f3b9f5c8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:20Z is after 
2025-08-24T17:21:41Z" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.228855 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:20Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.241966 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5777c8afe286becbd127ff2f1cc7617adff9e95a8e4061f433134bf749fa89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:20Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.253759 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63bfb707ccc4df1bf37c121b2c44c2e74bbd4c0ba5daec8a6b5e9b0d91c0f6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:20Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.267209 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc533e72d865a8b2ece2e1b3ac15c56f873c2370c482ec0d1b683b343733cb06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wpdh4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:20Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.286313 4792 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c149ca0a128d1e33a8df95c177c80ba3a8dbb2caf877124ba7e10ff195808ad3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7774ef77b87138ed3e8ed79d14716c2def47a9b30828fd996641b33c3db6c81c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T18:36:53Z\\\",\\\"message\\\":\\\" ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:52Z is after 2025-08-24T17:21:41Z]\\\\nI1202 18:36:52.795313 6429 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:default/kubernetes]} name:Service_default/kubernetes_TCP_node_router_crc options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.1:443:169.254.0.2:6443]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {4de\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c149ca0a128d1e33a8df95c177c80ba3a8dbb2caf877124ba7e10ff195808ad3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T18:37:19Z\\\",\\\"message\\\":\\\" ClusterVersion version 
9101b518-476b-4eea-8fa6-69b0534e5caa 0xc006ec7d17 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https-metrics,Protocol:TCP,Port:8443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: catalog-operator,},ClusterIP:10.217.5.204,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.204],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1202 18:37:18.318153 6766 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1202 18:37:18.318284 6766 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1202 18:37:18.318136 6766 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machin\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:37:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d\\\",\\
\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4jhb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:20Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.291114 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.291222 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.291289 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.291352 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.291422 4792 setters.go:603] 
"Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:20Z","lastTransitionTime":"2025-12-02T18:37:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.300155 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d1917fa-6107-4248-a5a8-a868f9db2814\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08
287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:20Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.310780 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d6409c0d-877b-444d-a794-4ab8255b9099\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62cbd6fd0a3a47c0baa3bc776b5a2e2ba638de1954d203b7111ec77f3106d32e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a81a880a60fc5626604b7c76035a053fccf22cd190fafdda123fe503d891a21\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d8ef45a3f94d51a7c9260ccd53fa15a9790303e7f7de95a1b166384fca992b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://766ea0daaf38f994a9e8ec7d4e8c872f667ddb4e651aa3955cc08139be85842d\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://766ea0daaf38f994a9e8ec7d4e8c872f667ddb4e651aa3955cc08139be85842d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:20Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.326495 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:20Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.394389 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.394485 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.394575 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.394656 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.394728 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:20Z","lastTransitionTime":"2025-12-02T18:37:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.497162 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.497231 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.497249 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.497274 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.497291 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:20Z","lastTransitionTime":"2025-12-02T18:37:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.539552 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.539660 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:37:20 crc kubenswrapper[4792]: E1202 18:37:20.539710 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:37:20 crc kubenswrapper[4792]: E1202 18:37:20.539842 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.539869 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:37:20 crc kubenswrapper[4792]: E1202 18:37:20.539993 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.601150 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.601193 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.601203 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.601220 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.601232 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:20Z","lastTransitionTime":"2025-12-02T18:37:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.703869 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.703909 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.703952 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.703966 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.703977 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:20Z","lastTransitionTime":"2025-12-02T18:37:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.807188 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.807233 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.807247 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.807262 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.807273 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:20Z","lastTransitionTime":"2025-12-02T18:37:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.910749 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.910801 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.910814 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.910836 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:20 crc kubenswrapper[4792]: I1202 18:37:20.910849 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:20Z","lastTransitionTime":"2025-12-02T18:37:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.013377 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.013440 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.013459 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.013484 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.013502 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:21Z","lastTransitionTime":"2025-12-02T18:37:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.055195 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4jhb5_2f79c130-fb71-4e1c-9e2d-ef492a0acb04/ovnkube-controller/3.log" Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.117195 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.117270 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.117289 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.117321 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.117343 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:21Z","lastTransitionTime":"2025-12-02T18:37:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.220152 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.220203 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.220215 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.220234 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.220245 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:21Z","lastTransitionTime":"2025-12-02T18:37:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.323353 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.323408 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.323418 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.323438 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.323450 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:21Z","lastTransitionTime":"2025-12-02T18:37:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.426406 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.426467 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.426482 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.426502 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.426513 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:21Z","lastTransitionTime":"2025-12-02T18:37:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.529800 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.529850 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.529862 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.529883 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.529897 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:21Z","lastTransitionTime":"2025-12-02T18:37:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.539732 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:37:21 crc kubenswrapper[4792]: E1202 18:37:21.539994 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52" Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.632225 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.632266 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.632278 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.632295 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.632307 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:21Z","lastTransitionTime":"2025-12-02T18:37:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.734796 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.734842 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.734854 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.734869 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.734879 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:21Z","lastTransitionTime":"2025-12-02T18:37:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.838845 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.838910 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.838925 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.838946 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.838960 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:21Z","lastTransitionTime":"2025-12-02T18:37:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.942756 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.942909 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.942933 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.942962 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:21 crc kubenswrapper[4792]: I1202 18:37:21.942980 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:21Z","lastTransitionTime":"2025-12-02T18:37:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.047399 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.047450 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.047459 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.047474 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.047486 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:22Z","lastTransitionTime":"2025-12-02T18:37:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.150719 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.150792 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.150812 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.150837 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.150854 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:22Z","lastTransitionTime":"2025-12-02T18:37:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.183712 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.183853 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.183875 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.183902 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.183919 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:22Z","lastTransitionTime":"2025-12-02T18:37:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:22 crc kubenswrapper[4792]: E1202 18:37:22.200883 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1ed8f756-400f-4462-b5a1-c3a97e79306e\\\",\\\"systemUUID\\\":\\\"4d48cf4e-c99d-43e6-acd7-ad269c0425b2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:22Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.205998 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.206071 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.206091 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.206123 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.206147 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:22Z","lastTransitionTime":"2025-12-02T18:37:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:22 crc kubenswrapper[4792]: E1202 18:37:22.224892 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1ed8f756-400f-4462-b5a1-c3a97e79306e\\\",\\\"systemUUID\\\":\\\"4d48cf4e-c99d-43e6-acd7-ad269c0425b2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:22Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.230476 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.230573 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.230594 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.230620 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.230637 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:22Z","lastTransitionTime":"2025-12-02T18:37:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:22 crc kubenswrapper[4792]: E1202 18:37:22.250152 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1ed8f756-400f-4462-b5a1-c3a97e79306e\\\",\\\"systemUUID\\\":\\\"4d48cf4e-c99d-43e6-acd7-ad269c0425b2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:22Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.256464 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.256557 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.256576 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.256633 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.256650 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:22Z","lastTransitionTime":"2025-12-02T18:37:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:22 crc kubenswrapper[4792]: E1202 18:37:22.275091 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1ed8f756-400f-4462-b5a1-c3a97e79306e\\\",\\\"systemUUID\\\":\\\"4d48cf4e-c99d-43e6-acd7-ad269c0425b2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:22Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.280629 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.280688 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.280708 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.280731 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.280752 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:22Z","lastTransitionTime":"2025-12-02T18:37:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:22 crc kubenswrapper[4792]: E1202 18:37:22.302146 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1ed8f756-400f-4462-b5a1-c3a97e79306e\\\",\\\"systemUUID\\\":\\\"4d48cf4e-c99d-43e6-acd7-ad269c0425b2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:22Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:22 crc kubenswrapper[4792]: E1202 18:37:22.302490 4792 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.305752 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.305805 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.305823 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.305843 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.305860 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:22Z","lastTransitionTime":"2025-12-02T18:37:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.409656 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.409711 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.409720 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.409737 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.409749 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:22Z","lastTransitionTime":"2025-12-02T18:37:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.513924 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.513979 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.513994 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.514014 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.514028 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:22Z","lastTransitionTime":"2025-12-02T18:37:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.539641 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.539661 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:37:22 crc kubenswrapper[4792]: E1202 18:37:22.539791 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.539843 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:37:22 crc kubenswrapper[4792]: E1202 18:37:22.539939 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:37:22 crc kubenswrapper[4792]: E1202 18:37:22.540155 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.617237 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.617283 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.617300 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.617319 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.617334 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:22Z","lastTransitionTime":"2025-12-02T18:37:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.720767 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.720840 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.720860 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.720886 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.720906 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:22Z","lastTransitionTime":"2025-12-02T18:37:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.823359 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.823410 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.823419 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.823441 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.823451 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:22Z","lastTransitionTime":"2025-12-02T18:37:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.927472 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.927577 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.927603 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.927642 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:22 crc kubenswrapper[4792]: I1202 18:37:22.927668 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:22Z","lastTransitionTime":"2025-12-02T18:37:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.031143 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.031218 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.031237 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.031269 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.031290 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:23Z","lastTransitionTime":"2025-12-02T18:37:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.144323 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.144386 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.144402 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.144427 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.144442 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:23Z","lastTransitionTime":"2025-12-02T18:37:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.248820 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.248915 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.248944 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.248974 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.248992 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:23Z","lastTransitionTime":"2025-12-02T18:37:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.352009 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.352084 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.352105 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.352131 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.352183 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:23Z","lastTransitionTime":"2025-12-02T18:37:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.456465 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.456594 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.456617 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.456647 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.456667 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:23Z","lastTransitionTime":"2025-12-02T18:37:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.539496 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:37:23 crc kubenswrapper[4792]: E1202 18:37:23.539753 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52" Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.559355 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.559433 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.559453 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.559481 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.559503 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:23Z","lastTransitionTime":"2025-12-02T18:37:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.662983 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.663078 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.663103 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.663140 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.663164 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:23Z","lastTransitionTime":"2025-12-02T18:37:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.766814 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.766889 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.766907 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.766935 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.766953 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:23Z","lastTransitionTime":"2025-12-02T18:37:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.871015 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.871092 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.871110 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.871135 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.871154 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:23Z","lastTransitionTime":"2025-12-02T18:37:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.974036 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.974077 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.974086 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.974102 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:23 crc kubenswrapper[4792]: I1202 18:37:23.974111 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:23Z","lastTransitionTime":"2025-12-02T18:37:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.076905 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.076959 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.076971 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.076989 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.077002 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:24Z","lastTransitionTime":"2025-12-02T18:37:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.179087 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.179152 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.179163 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.179180 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.179192 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:24Z","lastTransitionTime":"2025-12-02T18:37:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.282035 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.282068 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.282076 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.282090 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.282101 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:24Z","lastTransitionTime":"2025-12-02T18:37:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.384401 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.384466 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.384485 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.384510 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.384560 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:24Z","lastTransitionTime":"2025-12-02T18:37:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.486950 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.487010 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.487027 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.487052 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.487070 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:24Z","lastTransitionTime":"2025-12-02T18:37:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.539110 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.539145 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.539112 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:37:24 crc kubenswrapper[4792]: E1202 18:37:24.539298 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:37:24 crc kubenswrapper[4792]: E1202 18:37:24.539395 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:37:24 crc kubenswrapper[4792]: E1202 18:37:24.539628 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.589487 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.589613 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.589634 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.589667 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.589687 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:24Z","lastTransitionTime":"2025-12-02T18:37:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.692212 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.692282 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.692305 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.692335 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.692362 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:24Z","lastTransitionTime":"2025-12-02T18:37:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.795842 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.795902 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.795918 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.795944 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.795960 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:24Z","lastTransitionTime":"2025-12-02T18:37:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.899152 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.899205 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.899222 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.899241 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:24 crc kubenswrapper[4792]: I1202 18:37:24.899284 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:24Z","lastTransitionTime":"2025-12-02T18:37:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.002217 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.002280 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.002297 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.002319 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.002335 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:25Z","lastTransitionTime":"2025-12-02T18:37:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.105800 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.105863 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.105881 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.105904 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.105921 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:25Z","lastTransitionTime":"2025-12-02T18:37:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.209874 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.209944 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.209962 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.209989 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.210011 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:25Z","lastTransitionTime":"2025-12-02T18:37:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.312829 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.312873 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.312881 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.312897 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.312906 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:25Z","lastTransitionTime":"2025-12-02T18:37:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.415133 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.415202 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.415219 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.415246 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.415265 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:25Z","lastTransitionTime":"2025-12-02T18:37:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.518877 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.518919 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.518929 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.518944 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.518955 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:25Z","lastTransitionTime":"2025-12-02T18:37:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.539162 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:37:25 crc kubenswrapper[4792]: E1202 18:37:25.539574 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52" Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.622263 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.622342 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.622360 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.622385 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.622412 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:25Z","lastTransitionTime":"2025-12-02T18:37:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.725985 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.726142 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.726162 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.726196 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.726216 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:25Z","lastTransitionTime":"2025-12-02T18:37:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.829677 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.829749 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.829773 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.829800 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.829818 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:25Z","lastTransitionTime":"2025-12-02T18:37:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.933245 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.933323 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.933341 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.933377 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:25 crc kubenswrapper[4792]: I1202 18:37:25.933398 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:25Z","lastTransitionTime":"2025-12-02T18:37:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.037334 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.037426 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.037445 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.037471 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.037489 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:26Z","lastTransitionTime":"2025-12-02T18:37:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.141156 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.141244 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.141264 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.141288 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.141309 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:26Z","lastTransitionTime":"2025-12-02T18:37:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.243963 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.244022 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.244038 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.244061 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.244079 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:26Z","lastTransitionTime":"2025-12-02T18:37:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.347854 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.347926 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.347943 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.347972 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.347993 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:26Z","lastTransitionTime":"2025-12-02T18:37:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.451688 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.451742 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.451752 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.451770 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.451783 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:26Z","lastTransitionTime":"2025-12-02T18:37:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.538872 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.538869 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:37:26 crc kubenswrapper[4792]: E1202 18:37:26.539076 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.538869 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:37:26 crc kubenswrapper[4792]: E1202 18:37:26.539301 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:37:26 crc kubenswrapper[4792]: E1202 18:37:26.539380 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.557826 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.557947 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.557969 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.557995 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.558024 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:26Z","lastTransitionTime":"2025-12-02T18:37:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.661578 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.661689 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.661710 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.662715 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.662785 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:26Z","lastTransitionTime":"2025-12-02T18:37:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.766108 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.766146 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.766174 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.766188 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.766198 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:26Z","lastTransitionTime":"2025-12-02T18:37:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.870567 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.870633 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.870649 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.870677 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.870696 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:26Z","lastTransitionTime":"2025-12-02T18:37:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.974318 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.974356 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.974397 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.974414 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:26 crc kubenswrapper[4792]: I1202 18:37:26.974425 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:26Z","lastTransitionTime":"2025-12-02T18:37:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.078544 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.078593 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.078607 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.078661 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.078675 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:27Z","lastTransitionTime":"2025-12-02T18:37:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.182928 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.182995 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.183018 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.183047 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.183067 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:27Z","lastTransitionTime":"2025-12-02T18:37:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.286750 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.286815 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.286832 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.286860 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.286878 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:27Z","lastTransitionTime":"2025-12-02T18:37:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.391018 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.391057 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.391068 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.391084 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.391096 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:27Z","lastTransitionTime":"2025-12-02T18:37:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.494416 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.494448 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.494458 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.494471 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.494481 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:27Z","lastTransitionTime":"2025-12-02T18:37:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.539313 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:37:27 crc kubenswrapper[4792]: E1202 18:37:27.539606 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52" Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.598279 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.598335 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.598349 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.598368 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.598379 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:27Z","lastTransitionTime":"2025-12-02T18:37:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.702869 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.702940 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.702961 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.702990 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.703007 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:27Z","lastTransitionTime":"2025-12-02T18:37:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.806493 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.806615 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.806635 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.806715 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.806741 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:27Z","lastTransitionTime":"2025-12-02T18:37:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.911013 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.911077 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.911086 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.911107 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:27 crc kubenswrapper[4792]: I1202 18:37:27.911118 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:27Z","lastTransitionTime":"2025-12-02T18:37:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.014486 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.014577 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.014604 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.014639 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.014660 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:28Z","lastTransitionTime":"2025-12-02T18:37:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.118337 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.118480 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.118504 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.118573 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.118595 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:28Z","lastTransitionTime":"2025-12-02T18:37:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.221787 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.221861 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.221883 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.221918 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.221945 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:28Z","lastTransitionTime":"2025-12-02T18:37:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.324719 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.324788 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.324809 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.324836 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.324854 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:28Z","lastTransitionTime":"2025-12-02T18:37:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.427865 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.428699 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.428730 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.428762 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.428782 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:28Z","lastTransitionTime":"2025-12-02T18:37:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.531381 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.531431 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.531448 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.531470 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.531486 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:28Z","lastTransitionTime":"2025-12-02T18:37:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.539624 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.539744 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:37:28 crc kubenswrapper[4792]: E1202 18:37:28.539890 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:37:28 crc kubenswrapper[4792]: E1202 18:37:28.540048 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.540067 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:37:28 crc kubenswrapper[4792]: E1202 18:37:28.540327 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.634976 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.635075 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.635098 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.635132 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.635157 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:28Z","lastTransitionTime":"2025-12-02T18:37:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.739129 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.739181 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.739193 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.739210 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.739223 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:28Z","lastTransitionTime":"2025-12-02T18:37:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.842630 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.842660 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.842669 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.842683 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.842692 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:28Z","lastTransitionTime":"2025-12-02T18:37:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.946021 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.946052 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.946060 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.946072 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:28 crc kubenswrapper[4792]: I1202 18:37:28.946081 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:28Z","lastTransitionTime":"2025-12-02T18:37:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.049622 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.049670 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.049686 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.049708 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.049727 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:29Z","lastTransitionTime":"2025-12-02T18:37:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.152622 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.152708 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.152730 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.152760 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.152784 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:29Z","lastTransitionTime":"2025-12-02T18:37:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.257154 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.257204 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.257219 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.257239 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.257255 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:29Z","lastTransitionTime":"2025-12-02T18:37:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.364491 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.364644 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.364680 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.364727 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.364768 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:29Z","lastTransitionTime":"2025-12-02T18:37:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.470932 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.471008 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.471019 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.471038 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.471049 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:29Z","lastTransitionTime":"2025-12-02T18:37:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.539771 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:37:29 crc kubenswrapper[4792]: E1202 18:37:29.540021 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.557814 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.561474 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:29Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.574041 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.574081 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.574093 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.574111 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.574123 4792 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:29Z","lastTransitionTime":"2025-12-02T18:37:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.579345 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nvcwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2adea4f-f2e8-4311-8d3b-e720e68530eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f39043dbb211b12cfe20e988fa0b943cefec51e7d2927e94d1cbb9b56c999038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgfcx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93a7fa91b532301637883d482e698574ab677db991d76698cffefb28f4e8b9b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgfcx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"i
p\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nvcwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:29Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.598598 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9668059a-1772-400b-b2ad-86aa3d306dd9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f78
14a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2153e3d5a1b277988329aaab097b2fdb9955c4d79382b08ff93827ad69b629a1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T18:36:27Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 18:36:22.337074 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 18:36:22.338216 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179137523/tls.crt::/tmp/serving-cert-2179137523/tls.key\\\\\\\"\\\\nI1202 18:36:27.868924 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 18:36:27.874780 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 18:36:27.874832 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 18:36:27.874861 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 18:36:27.874872 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 18:36:27.886080 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1202 18:36:27.886174 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 18:36:27.886187 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886221 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 18:36:27.886250 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 18:36:27.886257 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 18:36:27.886264 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 18:36:27.889226 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:29Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.614796 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:29Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.636376 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5777c8afe286becbd127ff2f1cc7617adff9e95a8e4061f433134bf749fa89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:29Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.655510 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8537808b344a3fb20a07a25558069d571f132b5a1004781b9efc0ed4d76de64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb24f25d6ddc35bcd113c137145f60606cbe656c7561eab446a5d8f3b9f5c8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:29Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.672590 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d6409c0d-877b-444d-a794-4ab8255b9099\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62cbd6fd0a3a47c0baa3bc776b5a2e2ba638de1954d203b7111ec77f3106d32e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a81a880a60fc5626604b7c76035a053fccf22cd190fafdda123fe503d891a21\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d8ef45a3f94d51a7c9260ccd53fa15a9790303e7f7de95a1b166384fca992b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://766ea0daaf38f994a9e8ec7d4e8c872f667ddb4e651aa3955cc08139be85842d\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://766ea0daaf38f994a9e8ec7d4e8c872f667ddb4e651aa3955cc08139be85842d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:29Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.676180 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.676243 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.676256 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.676277 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.676291 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:29Z","lastTransitionTime":"2025-12-02T18:37:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.691295 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:29Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.713844 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63bfb707ccc4df1bf37c121b2c44c2e74bbd4c0ba5daec8a6b5e9b0d91c0f6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:29Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.736157 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc533e72d865a8b2ece2e1b3ac15c56f873c2370c482ec0d1b683b343733cb06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wpdh4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:29Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.775500 4792 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c149ca0a128d1e33a8df95c177c80ba3a8dbb2caf877124ba7e10ff195808ad3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7774ef77b87138ed3e8ed79d14716c2def47a9b30828fd996641b33c3db6c81c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T18:36:53Z\\\",\\\"message\\\":\\\" ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:36:52Z is after 2025-08-24T17:21:41Z]\\\\nI1202 18:36:52.795313 6429 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:default/kubernetes]} name:Service_default/kubernetes_TCP_node_router_crc options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.1:443:169.254.0.2:6443]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {4de\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c149ca0a128d1e33a8df95c177c80ba3a8dbb2caf877124ba7e10ff195808ad3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T18:37:19Z\\\",\\\"message\\\":\\\" ClusterVersion version 
9101b518-476b-4eea-8fa6-69b0534e5caa 0xc006ec7d17 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https-metrics,Protocol:TCP,Port:8443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: catalog-operator,},ClusterIP:10.217.5.204,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.204],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1202 18:37:18.318153 6766 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1202 18:37:18.318284 6766 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1202 18:37:18.318136 6766 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machin\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:37:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d\\\",\\
\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4jhb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:29Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.780745 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.780805 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.780821 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.780846 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.780867 4792 setters.go:603] 
"Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:29Z","lastTransitionTime":"2025-12-02T18:37:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.799377 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d1917fa-6107-4248-a5a8-a868f9db2814\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08
287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:29Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.816151 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l7jxh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"da09a4ee-4e22-4396-a352-7bcb2b89db73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e9641526f236df1982b46cbfc0583af3bc551c9ddff5bdf2006a5d6c075307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nqc7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l7jxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:29Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.843947 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69235e1-c1ab-41e3-af2c-14b956c6c37b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddaff8d1d17ef199fd8e33d481a528de305f885915e7db4033dc0e64a94f669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l66ss\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:29Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.866012 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dw25w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6925e194-2dc8-4a3a-aa76-8db41ff27997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58258d39cb6d0616429a94e2f3542d2250c8e8a89d393cf5667aa0b877d03797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9e65c94ca8016f1dfcde2fc1262f2f78f150ada4796f7f2607221921bb3b805\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T18:37:15Z\\\",\\\"message\\\":\\\"2025-12-02T18:36:30+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_b44dcf89-3d61-4fbd-8ed1-5b2337cb0497\\\\n2025-12-02T18:36:30+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_b44dcf89-3d61-4fbd-8ed1-5b2337cb0497 to /host/opt/cni/bin/\\\\n2025-12-02T18:36:30Z [verbose] multus-daemon started\\\\n2025-12-02T18:36:30Z [verbose] Readiness Indicator file check\\\\n2025-12-02T18:37:15Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:37:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tjt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dw25w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:29Z is after 2025-08-24T17:21:41Z"
Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.884582 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.884631 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.884652 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.884679 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.884696 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:29Z","lastTransitionTime":"2025-12-02T18:37:29Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.886900 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qrlwg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85a6200e-64b5-4e6b-bd19-933e3b576bfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3415f5b847f72aad5d88625823e8241334b103ab38a6450f9118b46058e96b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jt4k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qrlwg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:29Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.905627 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2ls4m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"778806a7-7e6f-4776-8233-b42b296ebc52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcjpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcjpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2ls4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:29Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.987431 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.987621 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.987651 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.987675 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:29 crc kubenswrapper[4792]: I1202 18:37:29.987694 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:29Z","lastTransitionTime":"2025-12-02T18:37:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.090147 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.090196 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.090209 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.090227 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.090240 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:30Z","lastTransitionTime":"2025-12-02T18:37:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.193274 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.193345 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.193364 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.193390 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.193409 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:30Z","lastTransitionTime":"2025-12-02T18:37:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.297066 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.297141 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.297159 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.297188 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.297207 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:30Z","lastTransitionTime":"2025-12-02T18:37:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.401635 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.401702 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.401721 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.401747 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.401764 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:30Z","lastTransitionTime":"2025-12-02T18:37:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.507239 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.507327 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.507352 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.507390 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.507410 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:30Z","lastTransitionTime":"2025-12-02T18:37:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.539256 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.539370 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:37:30 crc kubenswrapper[4792]: E1202 18:37:30.539575 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.539741 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:37:30 crc kubenswrapper[4792]: E1202 18:37:30.540246 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:37:30 crc kubenswrapper[4792]: E1202 18:37:30.540567 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.562710 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.610404 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.610477 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.610500 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.610564 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.610590 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:30Z","lastTransitionTime":"2025-12-02T18:37:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.713926 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.713989 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.714010 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.714040 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.714060 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:30Z","lastTransitionTime":"2025-12-02T18:37:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.818052 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.818118 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.818137 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.818167 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.818192 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:30Z","lastTransitionTime":"2025-12-02T18:37:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.922614 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.922695 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.922714 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.922749 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:30 crc kubenswrapper[4792]: I1202 18:37:30.922773 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:30Z","lastTransitionTime":"2025-12-02T18:37:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.027044 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.027126 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.027143 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.027185 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.027206 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:31Z","lastTransitionTime":"2025-12-02T18:37:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.130417 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.130574 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.130606 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.130644 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.130672 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:31Z","lastTransitionTime":"2025-12-02T18:37:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.233936 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.233984 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.234002 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.234026 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.234044 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:31Z","lastTransitionTime":"2025-12-02T18:37:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.338454 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.338556 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.338573 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.338599 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.338617 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:31Z","lastTransitionTime":"2025-12-02T18:37:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.442237 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.442285 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.442301 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.442324 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.442340 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:31Z","lastTransitionTime":"2025-12-02T18:37:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.539388 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:37:31 crc kubenswrapper[4792]: E1202 18:37:31.539723 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52" Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.545982 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.546055 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.546075 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.546106 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.546128 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:31Z","lastTransitionTime":"2025-12-02T18:37:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.650415 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.650475 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.650486 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.650507 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.650537 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:31Z","lastTransitionTime":"2025-12-02T18:37:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.753417 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.753484 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.753501 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.753579 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.753600 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:31Z","lastTransitionTime":"2025-12-02T18:37:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.813337 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:37:31 crc kubenswrapper[4792]: E1202 18:37:31.813764 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:35.813733921 +0000 UTC m=+146.586626249 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.856234 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.856271 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.856285 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.856300 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.856310 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:31Z","lastTransitionTime":"2025-12-02T18:37:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.914768 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.914890 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.914930 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.914979 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:37:31 crc kubenswrapper[4792]: E1202 18:37:31.915173 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object 
"openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 18:37:31 crc kubenswrapper[4792]: E1202 18:37:31.915199 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 18:37:31 crc kubenswrapper[4792]: E1202 18:37:31.915193 4792 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 18:37:31 crc kubenswrapper[4792]: E1202 18:37:31.915194 4792 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 18:37:31 crc kubenswrapper[4792]: E1202 18:37:31.915344 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 18:38:35.915306755 +0000 UTC m=+146.688199113 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 18:37:31 crc kubenswrapper[4792]: E1202 18:37:31.915220 4792 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 18:37:31 crc kubenswrapper[4792]: E1202 18:37:31.915457 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 18:38:35.915409948 +0000 UTC m=+146.688302506 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 18:37:31 crc kubenswrapper[4792]: E1202 18:37:31.915498 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-02 18:38:35.915475119 +0000 UTC m=+146.688367477 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 18:37:31 crc kubenswrapper[4792]: E1202 18:37:31.915297 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 18:37:31 crc kubenswrapper[4792]: E1202 18:37:31.915567 4792 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 18:37:31 crc kubenswrapper[4792]: E1202 18:37:31.915586 4792 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 18:37:31 crc kubenswrapper[4792]: E1202 18:37:31.915632 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-02 18:38:35.915617113 +0000 UTC m=+146.688509481 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.959605 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.959667 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.959679 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.959704 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:31 crc kubenswrapper[4792]: I1202 18:37:31.959717 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:31Z","lastTransitionTime":"2025-12-02T18:37:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.063186 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.063283 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.063300 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.063322 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.063342 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:32Z","lastTransitionTime":"2025-12-02T18:37:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.166157 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.166212 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.166228 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.166250 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.166266 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:32Z","lastTransitionTime":"2025-12-02T18:37:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.271216 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.271290 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.271309 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.271761 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.272007 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:32Z","lastTransitionTime":"2025-12-02T18:37:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.349515 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.349600 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.349616 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.349639 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.349659 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:32Z","lastTransitionTime":"2025-12-02T18:37:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:32 crc kubenswrapper[4792]: E1202 18:37:32.371782 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1ed8f756-400f-4462-b5a1-c3a97e79306e\\\",\\\"systemUUID\\\":\\\"4d48cf4e-c99d-43e6-acd7-ad269c0425b2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:32Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.377471 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.377562 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.377581 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.378022 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.378089 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:32Z","lastTransitionTime":"2025-12-02T18:37:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:32 crc kubenswrapper[4792]: E1202 18:37:32.398925 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1ed8f756-400f-4462-b5a1-c3a97e79306e\\\",\\\"systemUUID\\\":\\\"4d48cf4e-c99d-43e6-acd7-ad269c0425b2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:32Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.403188 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.403258 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.403282 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.403311 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.403333 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:32Z","lastTransitionTime":"2025-12-02T18:37:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:32 crc kubenswrapper[4792]: E1202 18:37:32.425201 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1ed8f756-400f-4462-b5a1-c3a97e79306e\\\",\\\"systemUUID\\\":\\\"4d48cf4e-c99d-43e6-acd7-ad269c0425b2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:32Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.430917 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.430987 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.431004 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.431029 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.431047 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:32Z","lastTransitionTime":"2025-12-02T18:37:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:32 crc kubenswrapper[4792]: E1202 18:37:32.452325 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1ed8f756-400f-4462-b5a1-c3a97e79306e\\\",\\\"systemUUID\\\":\\\"4d48cf4e-c99d-43e6-acd7-ad269c0425b2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:32Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.457566 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.457596 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.457606 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.457621 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.457633 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:32Z","lastTransitionTime":"2025-12-02T18:37:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:32 crc kubenswrapper[4792]: E1202 18:37:32.477418 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1ed8f756-400f-4462-b5a1-c3a97e79306e\\\",\\\"systemUUID\\\":\\\"4d48cf4e-c99d-43e6-acd7-ad269c0425b2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:32Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:32 crc kubenswrapper[4792]: E1202 18:37:32.477701 4792 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.480092 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.480130 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.480141 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.480158 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.480171 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:32Z","lastTransitionTime":"2025-12-02T18:37:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.539541 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.539760 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.539830 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:37:32 crc kubenswrapper[4792]: E1202 18:37:32.539877 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:37:32 crc kubenswrapper[4792]: E1202 18:37:32.540137 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:37:32 crc kubenswrapper[4792]: E1202 18:37:32.540194 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.584973 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.585041 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.585058 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.585120 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.585143 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:32Z","lastTransitionTime":"2025-12-02T18:37:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.688467 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.688542 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.688556 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.688577 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.688590 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:32Z","lastTransitionTime":"2025-12-02T18:37:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.791771 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.791850 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.791868 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.791898 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.791921 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:32Z","lastTransitionTime":"2025-12-02T18:37:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.895582 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.895661 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.895680 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.895712 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:32 crc kubenswrapper[4792]: I1202 18:37:32.895735 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:32Z","lastTransitionTime":"2025-12-02T18:37:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.000649 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.000721 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.000739 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.000769 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.000788 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:33Z","lastTransitionTime":"2025-12-02T18:37:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.105855 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.105939 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.105963 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.105996 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.106021 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:33Z","lastTransitionTime":"2025-12-02T18:37:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.210051 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.210138 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.210158 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.210185 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.210203 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:33Z","lastTransitionTime":"2025-12-02T18:37:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.315607 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.315723 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.315747 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.315782 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.315812 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:33Z","lastTransitionTime":"2025-12-02T18:37:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.420608 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.421250 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.421276 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.421310 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.421340 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:33Z","lastTransitionTime":"2025-12-02T18:37:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.525409 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.525488 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.525507 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.525568 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.525596 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:33Z","lastTransitionTime":"2025-12-02T18:37:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.539513 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m"
Dec 02 18:37:33 crc kubenswrapper[4792]: E1202 18:37:33.539812 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52"
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.628768 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.628855 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.628872 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.628899 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.628921 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:33Z","lastTransitionTime":"2025-12-02T18:37:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.732634 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.732721 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.732743 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.732772 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.732793 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:33Z","lastTransitionTime":"2025-12-02T18:37:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.836081 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.836227 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.836258 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.836307 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.836339 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:33Z","lastTransitionTime":"2025-12-02T18:37:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.939885 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.939989 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.940018 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.940049 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 18:37:33 crc kubenswrapper[4792]: I1202 18:37:33.940068 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:33Z","lastTransitionTime":"2025-12-02T18:37:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.050108 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.050207 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.050225 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.050250 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.050269 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:34Z","lastTransitionTime":"2025-12-02T18:37:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.153735 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.153810 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.153824 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.153842 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.153874 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:34Z","lastTransitionTime":"2025-12-02T18:37:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.257242 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.257324 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.257344 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.257375 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.257397 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:34Z","lastTransitionTime":"2025-12-02T18:37:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.363252 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.363316 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.363329 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.363348 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.363401 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:34Z","lastTransitionTime":"2025-12-02T18:37:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.466916 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.467003 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.467029 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.467061 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.467087 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:34Z","lastTransitionTime":"2025-12-02T18:37:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.538995 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.539131 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 18:37:34 crc kubenswrapper[4792]: E1202 18:37:34.539345 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.539385 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 18:37:34 crc kubenswrapper[4792]: E1202 18:37:34.539552 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 18:37:34 crc kubenswrapper[4792]: E1202 18:37:34.539852 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.540808 4792 scope.go:117] "RemoveContainer" containerID="c149ca0a128d1e33a8df95c177c80ba3a8dbb2caf877124ba7e10ff195808ad3" Dec 02 18:37:34 crc kubenswrapper[4792]: E1202 18:37:34.541071 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-4jhb5_openshift-ovn-kubernetes(2f79c130-fb71-4e1c-9e2d-ef492a0acb04)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.558132 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63bfb707ccc4df1bf37c121b2c44c2e74bbd4c0ba5daec8a6b5e9b0d91c0f6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:34Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.572844 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.572919 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.572938 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.572968 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.572987 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:34Z","lastTransitionTime":"2025-12-02T18:37:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.578327 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc533e72d865a8b2ece2e1b3ac15c56f873c2370c482ec0d1b683b343733cb06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run
/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wpdh4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:34Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.614488 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c149ca0a128d1e33a8df95c177c80ba3a8dbb2ca
f877124ba7e10ff195808ad3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c149ca0a128d1e33a8df95c177c80ba3a8dbb2caf877124ba7e10ff195808ad3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T18:37:19Z\\\",\\\"message\\\":\\\" ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc006ec7d17 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https-metrics,Protocol:TCP,Port:8443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: catalog-operator,},ClusterIP:10.217.5.204,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.204],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1202 18:37:18.318153 6766 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1202 18:37:18.318284 6766 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1202 18:37:18.318136 6766 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machin\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:37:17Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-4jhb5_openshift-ovn-kubernetes(2f79c130-fb71-4e1c-9e2d-ef492a0acb04)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4jhb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:34Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.639108 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d1917fa-6107-4248-a5a8-a868f9db2814\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:34Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.662264 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d6409c0d-877b-444d-a794-4ab8255b9099\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62cbd6fd0a3a47c0baa3bc776b5a2e2ba638de1954d203b7111ec77f3106d32e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a81a880a60fc5626604b7c76035a053fccf22cd190fafdda123fe503d891a21\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d8ef45a3f94d51a7c9260ccd53fa15a9790303e7f7de95a1b166384fca992b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://766ea0daaf38f994a9e8ec7d4e8c872f667ddb4e651aa3955cc08139be85842d\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://766ea0daaf38f994a9e8ec7d4e8c872f667ddb4e651aa3955cc08139be85842d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:34Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.675945 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.676025 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.676048 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.676080 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.676103 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:34Z","lastTransitionTime":"2025-12-02T18:37:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.684116 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:34Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.711490 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dw25w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6925e194-2dc8-4a3a-aa76-8db41ff27997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58258d39cb6d0616429a94e2f3542d2250c8e8a89d393cf5667aa0b877d03797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9e65c94ca8016f1dfcde2fc1262f2f78f150ada4796f7f2607221921bb3b805\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T18:37:15Z\\\",\\\"message\\\":\\\"2025-12-02T18:36:30+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_b44dcf89-3d61-4fbd-8ed1-5b2337cb0497\\\\n2025-12-02T18:36:30+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_b44dcf89-3d61-4fbd-8ed1-5b2337cb0497 to /host/opt/cni/bin/\\\\n2025-12-02T18:36:30Z [verbose] multus-daemon started\\\\n2025-12-02T18:36:30Z [verbose] Readiness Indicator file check\\\\n2025-12-02T18:37:15Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:37:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tjt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dw25w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:34Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.729240 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qrlwg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"85a6200e-64b5-4e6b-bd19-933e3b576bfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3415f5b847f72aad5d88625823e8241334b103ab38a6450f9118b46058e96b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jt4k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qrlwg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:34Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.748193 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2ls4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"778806a7-7e6f-4776-8233-b42b296ebc52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcjpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcjpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2ls4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:34Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.781164 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.781257 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.781276 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.781306 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.781323 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:34Z","lastTransitionTime":"2025-12-02T18:37:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.787026 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99e48094-7461-4e02-9b8d-cf968bd0df50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e51d1dd753c543a66e8121c8221b153bb20949a62b1a227cc9323a48d94e434\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9a0bc12852e3244f38a9854aedc0f74d086ae9dee69dd4d6cd773f743ad7cdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://21a864cca8df919277e92dddd034772dd25991a778cb435aa9977983f19595d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://116adfaaadbeb48f2632afac2d1678cf3833bbbeea42356d477d7ae205aa621a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://937c26cbc0b38b347cdaf65a9e03413bde8eebda56f33830dd20f41513004111\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd1f3a58e7ef15402b3c0ce62ee49f54b9973801e4df0e3699c2c2ec5f78342\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fd1f3a58e7ef15402b3c0ce62ee49f54b9973801e4df0e3699c2c2ec5f78342\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a0b2870ef699525b38d94f21951da305874af4208a990da2ada7c0332866d36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a0b2870ef699525b38d94f21951da305874af4208a990da2ada7c0332866d36\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://95d756b6ee6fb88c93512a94280783b3558cb8ff1fcc253200706cb74e08870d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95d756b6ee6fb88c93512a94280783b3558cb8ff1fcc253200706cb74e08870d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:34Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.805478 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l7jxh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"da09a4ee-4e22-4396-a352-7bcb2b89db73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e9641526f236df1982b46cbfc0583af3bc551c9ddff5bdf2006a5d6c075307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nqc7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l7jxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:34Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.830954 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69235e1-c1ab-41e3-af2c-14b956c6c37b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddaff8d1d17ef199fd8e33d481a528de305f885915e7db4033dc0e64a94f669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l66ss\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:34Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.854846 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9668059a-1772-400b-b2ad-86aa3d306dd9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2153e3d5a1b277988329aaab097b2fdb9955c4d79382b08ff93827ad69b629a1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T18:36:27Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 18:36:22.337074 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 18:36:22.338216 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179137523/tls.crt::/tmp/serving-cert-2179137523/tls.key\\\\\\\"\\\\nI1202 18:36:27.868924 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 18:36:27.874780 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 18:36:27.874832 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 18:36:27.874861 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 18:36:27.874872 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 18:36:27.886080 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1202 18:36:27.886174 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 18:36:27.886187 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886221 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 18:36:27.886250 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 18:36:27.886257 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 18:36:27.886264 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 18:36:27.889226 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:34Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.873796 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:34Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.884759 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.884815 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.884829 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.884846 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.884908 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:34Z","lastTransitionTime":"2025-12-02T18:37:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.893937 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nvcwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2adea4f-f2e8-4311-8d3b-e720e68530eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f39043dbb211b12cfe20e988fa0b943cefec51e7d2927e94d1cbb9b56c999038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgfcx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93a7fa91b532301637883d482e698574ab677db991d76698cffefb28f4e8b9b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgfcx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nvcwt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:34Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.912192 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8537808b344a3fb20a07a25558069d571f132b5a1004781b9efc0ed4d76de64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb24f25d6ddc35bcd113c137145f60606cbe656c7561eab446a5d8f3b9f5c8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:34Z is after 
2025-08-24T17:21:41Z" Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.935266 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67aab251-4b2c-4486-bb99-93f0345fe2ce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0dabf551bfcd95fbdaf2d0159d4f9f87164313d9d7646303805a4be08a470b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://387ae2df2ebffc4d68366fe6c589499cd68be2abdc6d24ee942a103f4d884c3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://387ae2df2ebffc4d68366fe6c589499cd68be2abdc6d24ee942a103f4d884c3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:34Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.957362 4792 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:34Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.980680 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5777c8afe286becbd127ff2f1cc7617adff9e95a8e4061f433134bf749fa89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:34Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.987967 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.988042 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.988062 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.988092 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:34 crc kubenswrapper[4792]: I1202 18:37:34.988112 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:34Z","lastTransitionTime":"2025-12-02T18:37:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.091101 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.091170 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.091184 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.091206 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.091220 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:35Z","lastTransitionTime":"2025-12-02T18:37:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.193867 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.193935 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.193948 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.193972 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.193986 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:35Z","lastTransitionTime":"2025-12-02T18:37:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.297459 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.297517 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.297578 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.297602 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.297619 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:35Z","lastTransitionTime":"2025-12-02T18:37:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.401819 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.401892 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.401910 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.401936 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.401955 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:35Z","lastTransitionTime":"2025-12-02T18:37:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.505346 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.505408 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.505419 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.505439 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.505451 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:35Z","lastTransitionTime":"2025-12-02T18:37:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.540921 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:37:35 crc kubenswrapper[4792]: E1202 18:37:35.541191 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52" Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.609986 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.610035 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.610045 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.610062 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.610077 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:35Z","lastTransitionTime":"2025-12-02T18:37:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.713885 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.713973 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.713985 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.714009 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.714029 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:35Z","lastTransitionTime":"2025-12-02T18:37:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.817568 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.817643 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.817659 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.817684 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.817702 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:35Z","lastTransitionTime":"2025-12-02T18:37:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.921662 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.921746 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.921767 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.921801 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:35 crc kubenswrapper[4792]: I1202 18:37:35.921823 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:35Z","lastTransitionTime":"2025-12-02T18:37:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.026401 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.026462 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.026487 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.026514 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.026557 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:36Z","lastTransitionTime":"2025-12-02T18:37:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.132051 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.132165 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.132192 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.132233 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.132266 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:36Z","lastTransitionTime":"2025-12-02T18:37:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.242893 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.243021 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.243043 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.243074 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.243124 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:36Z","lastTransitionTime":"2025-12-02T18:37:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.346683 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.346762 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.346784 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.346812 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.346832 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:36Z","lastTransitionTime":"2025-12-02T18:37:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.451162 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.451259 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.451281 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.451317 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.451340 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:36Z","lastTransitionTime":"2025-12-02T18:37:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.538978 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.539084 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.539216 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:37:36 crc kubenswrapper[4792]: E1202 18:37:36.539408 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:37:36 crc kubenswrapper[4792]: E1202 18:37:36.539688 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:37:36 crc kubenswrapper[4792]: E1202 18:37:36.539953 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.554983 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.555042 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.555072 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.555104 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.555128 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:36Z","lastTransitionTime":"2025-12-02T18:37:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.658760 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.658830 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.658847 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.658873 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.658894 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:36Z","lastTransitionTime":"2025-12-02T18:37:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.763711 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.763767 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.763785 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.763807 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.763819 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:36Z","lastTransitionTime":"2025-12-02T18:37:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.867410 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.867473 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.867491 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.867557 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.867579 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:36Z","lastTransitionTime":"2025-12-02T18:37:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.970188 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.970258 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.970276 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.970306 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:36 crc kubenswrapper[4792]: I1202 18:37:36.970328 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:36Z","lastTransitionTime":"2025-12-02T18:37:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.073225 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.073286 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.073298 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.073316 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.073331 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:37Z","lastTransitionTime":"2025-12-02T18:37:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.176310 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.176366 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.176377 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.176397 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.176413 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:37Z","lastTransitionTime":"2025-12-02T18:37:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.279836 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.279915 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.279935 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.279960 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.279986 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:37Z","lastTransitionTime":"2025-12-02T18:37:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.382199 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.382237 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.382245 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.382260 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.382272 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:37Z","lastTransitionTime":"2025-12-02T18:37:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.485011 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.485060 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.485073 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.485091 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.485104 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:37Z","lastTransitionTime":"2025-12-02T18:37:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.539179 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:37:37 crc kubenswrapper[4792]: E1202 18:37:37.539357 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52" Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.587855 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.587900 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.587909 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.587923 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.587936 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:37Z","lastTransitionTime":"2025-12-02T18:37:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.693101 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.693168 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.693188 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.693212 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.693231 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:37Z","lastTransitionTime":"2025-12-02T18:37:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.796284 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.796356 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.796375 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.796401 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.796419 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:37Z","lastTransitionTime":"2025-12-02T18:37:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.900066 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.900131 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.900148 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.900171 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:37 crc kubenswrapper[4792]: I1202 18:37:37.900188 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:37Z","lastTransitionTime":"2025-12-02T18:37:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.003701 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.003952 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.003983 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.004020 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.004046 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:38Z","lastTransitionTime":"2025-12-02T18:37:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.107648 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.107704 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.107722 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.107745 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.107766 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:38Z","lastTransitionTime":"2025-12-02T18:37:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.210203 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.210303 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.210318 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.210335 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.210350 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:38Z","lastTransitionTime":"2025-12-02T18:37:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.314227 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.314301 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.314319 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.314348 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.314366 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:38Z","lastTransitionTime":"2025-12-02T18:37:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.418431 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.418492 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.418508 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.418550 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.418567 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:38Z","lastTransitionTime":"2025-12-02T18:37:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.522093 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.522160 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.522180 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.522208 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.522232 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:38Z","lastTransitionTime":"2025-12-02T18:37:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.538747 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:37:38 crc kubenswrapper[4792]: E1202 18:37:38.538872 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.538910 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.539074 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:37:38 crc kubenswrapper[4792]: E1202 18:37:38.539214 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:37:38 crc kubenswrapper[4792]: E1202 18:37:38.539314 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.624973 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.625079 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.625120 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.625157 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.625179 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:38Z","lastTransitionTime":"2025-12-02T18:37:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.729160 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.729200 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.729209 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.729224 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.729233 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:38Z","lastTransitionTime":"2025-12-02T18:37:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.832986 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.833069 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.833086 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.833109 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.833126 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:38Z","lastTransitionTime":"2025-12-02T18:37:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.941167 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.941242 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.941265 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.941290 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:38 crc kubenswrapper[4792]: I1202 18:37:38.941309 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:38Z","lastTransitionTime":"2025-12-02T18:37:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.046856 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.046941 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.046961 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.046990 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.047012 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:39Z","lastTransitionTime":"2025-12-02T18:37:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.150670 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.150716 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.150724 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.150737 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.150747 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:39Z","lastTransitionTime":"2025-12-02T18:37:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.254035 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.254097 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.254109 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.254129 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.254142 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:39Z","lastTransitionTime":"2025-12-02T18:37:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.357770 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.357819 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.357830 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.357850 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.357865 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:39Z","lastTransitionTime":"2025-12-02T18:37:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.461400 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.461483 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.461510 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.461589 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.461617 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:39Z","lastTransitionTime":"2025-12-02T18:37:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.538807 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:37:39 crc kubenswrapper[4792]: E1202 18:37:39.539040 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.560884 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d1917fa-6107-4248-a5a8-a868f9db2814\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:39Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.565567 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.565647 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.565685 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.565707 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.565719 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:39Z","lastTransitionTime":"2025-12-02T18:37:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.578853 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d6409c0d-877b-444d-a794-4ab8255b9099\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62cbd6fd0a3a47c0baa3bc776b5a2e2ba638de1954d203b7111ec77f3106d32e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a81a880a60fc5626604b7c76035a053fccf22cd190fafdda123fe503d891a21\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d8ef45a3f94d51a7c9260ccd53fa15a9790303e7f7de95a1b166384fca992b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://766ea0daaf38f994a9e8ec7d4e8c872f667ddb4e651aa3955cc08139be85842d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://766ea0daaf38f994a9e8ec7d4e8c872f667ddb4e651aa3955cc08139be85842d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:39Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.597179 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:39Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.612895 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63bfb707ccc4df1bf37c121b2c44c2e74bbd4c0ba5daec8a6b5e9b0d91c0f6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:39Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.631861 4792 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc533e72d865a8b2ece2e1b3ac15c56f873c2370c482ec0d1b683b343733cb06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wpdh4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:39Z is after 2025-08-24T17:21:41Z" Dec 02 
18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.668404 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.668480 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.668498 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.668558 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.668579 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:39Z","lastTransitionTime":"2025-12-02T18:37:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.669975 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c149ca0a128d1e33a8df95c177c80ba3a8dbb2ca
f877124ba7e10ff195808ad3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c149ca0a128d1e33a8df95c177c80ba3a8dbb2caf877124ba7e10ff195808ad3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T18:37:19Z\\\",\\\"message\\\":\\\" ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc006ec7d17 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https-metrics,Protocol:TCP,Port:8443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: catalog-operator,},ClusterIP:10.217.5.204,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.204],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1202 18:37:18.318153 6766 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1202 18:37:18.318284 6766 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1202 18:37:18.318136 6766 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machin\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:37:17Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-4jhb5_openshift-ovn-kubernetes(2f79c130-fb71-4e1c-9e2d-ef492a0acb04)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4jhb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:39Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.708050 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99e48094-7461-4e02-9b8d-cf968bd0df50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e51d1dd753c543a66e8121c8221b153bb20949a62b1a227cc9323a48d94e434\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9a0bc12852e3244f38a9854
aedc0f74d086ae9dee69dd4d6cd773f743ad7cdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://21a864cca8df919277e92dddd034772dd25991a778cb435aa9977983f19595d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://116adfaaadbeb48f2632afac2d1678cf3833bbbeea42356d477d7ae205aa621a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://937c26cbc0b38b347cdaf65a9e03413bde8eebda56f33830dd20f41513004111\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd1f3a58e7ef15402b3c0ce62ee49f54b9973801e4df0e3699c2c2ec5f78342\\\",\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fd1f3a58e7ef15402b3c0ce62ee49f54b9973801e4df0e3699c2c2ec5f78342\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a0b2870ef699525b38d94f21951da305874af4208a990da2ada7c0332866d36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a0b2870ef699525b38d94f21951da305874af4208a990da2ada7c0332866d36\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://95d756b6ee6fb88c93512a94280783b3558cb8ff1fcc253200706cb74e08870d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95d756b6ee6fb88c93512a94280783b3558cb8ff1fcc253200706cb74e08870d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:39Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.725739 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l7jxh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"da09a4ee-4e22-4396-a352-7bcb2b89db73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e9641526f236df1982b46cbfc0583af3bc551c9ddff5bdf2006a5d6c075307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nqc7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l7jxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:39Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.751986 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69235e1-c1ab-41e3-af2c-14b956c6c37b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddaff8d1d17ef199fd8e33d481a528de305f885915e7db4033dc0e64a94f669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l66ss\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:39Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.771150 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dw25w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6925e194-2dc8-4a3a-aa76-8db41ff27997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58258d39cb6d0616429a94e2f3542d2250c8e8a89d393cf5667aa0b877d03797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9e65c94ca8016f1dfcde2fc1262f2f78f150ada4796f7f2607221921bb3b805\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T18:37:15Z\\\",\\\"message\\\":\\\"2025-12-02T18:36:30+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_b44dcf89-3d61-4fbd-8ed1-5b2337cb0497\\\\n2025-12-02T18:36:30+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_b44dcf89-3d61-4fbd-8ed1-5b2337cb0497 to /host/opt/cni/bin/\\\\n2025-12-02T18:36:30Z [verbose] multus-daemon started\\\\n2025-12-02T18:36:30Z [verbose] Readiness Indicator file check\\\\n2025-12-02T18:37:15Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:37:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tjt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dw25w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:39Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.771480 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.771603 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.771624 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.771657 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.771678 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:39Z","lastTransitionTime":"2025-12-02T18:37:39Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.787643 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qrlwg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85a6200e-64b5-4e6b-bd19-933e3b576bfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3415f5b847f72aad5d88625823e8241334b103ab38a6450f9118b46058e96b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jt4k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qrlwg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:39Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.814242 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2ls4m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"778806a7-7e6f-4776-8233-b42b296ebc52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcjpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcjpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2ls4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:39Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.839922 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9668059a-1772-400b-b2ad-86aa3d306dd9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2153e3d5a1b277988329aaab097b2fdb9955c4d79382b08ff93827ad69b629a1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T18:36:27Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 18:36:22.337074 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 18:36:22.338216 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179137523/tls.crt::/tmp/serving-cert-2179137523/tls.key\\\\\\\"\\\\nI1202 18:36:27.868924 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 18:36:27.874780 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 18:36:27.874832 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 18:36:27.874861 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 18:36:27.874872 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 18:36:27.886080 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1202 18:36:27.886174 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 18:36:27.886187 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886221 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 18:36:27.886250 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 18:36:27.886257 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 18:36:27.886264 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 18:36:27.889226 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:39Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.861642 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:39Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.874684 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.874749 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.874767 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.874792 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.874812 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:39Z","lastTransitionTime":"2025-12-02T18:37:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.880917 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nvcwt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2adea4f-f2e8-4311-8d3b-e720e68530eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f39043dbb211b12cfe20e988fa0b943cefec51e7d2927e94d1cbb9b56c999038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgfcx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93a7fa91b532301637883d482e698574ab677db991d76698cffefb28f4e8b9b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgfcx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nvcwt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:39Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.896731 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67aab251-4b2c-4486-bb99-93f0345fe2ce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0dabf551bfcd95fbdaf2d0159d4f9f87164313d9d7646303805a4be08a470b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://387ae2df2ebffc4d68366fe6c589499cd68be2abdc6d24ee942a103f4d884c3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://387ae2df2ebffc4d68366fe6c589499cd68be2abdc6d24ee942a103f4d884c3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:39Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.912593 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:39Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.933863 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5777c8afe286becbd127ff2f1cc7617adff9e95a8e4061f433134bf749fa89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:39Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.955614 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8537808b344a3fb20a07a25558069d571f132b5a1004781b9efc0ed4d76de64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb24f25d6ddc35bcd113c137145f60606cbe656c7561eab446a5d8f3b9f5c8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:39Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.978319 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.978367 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.978391 4792 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.978415 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:39 crc kubenswrapper[4792]: I1202 18:37:39.978434 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:39Z","lastTransitionTime":"2025-12-02T18:37:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.082192 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.082289 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.082306 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.082327 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.082343 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:40Z","lastTransitionTime":"2025-12-02T18:37:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.185181 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.185291 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.185461 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.185483 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.185495 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:40Z","lastTransitionTime":"2025-12-02T18:37:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.288406 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.288494 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.288517 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.288579 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.288603 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:40Z","lastTransitionTime":"2025-12-02T18:37:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.391671 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.391742 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.391762 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.391793 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.391814 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:40Z","lastTransitionTime":"2025-12-02T18:37:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.494442 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.494496 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.494507 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.494558 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.494570 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:40Z","lastTransitionTime":"2025-12-02T18:37:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.538755 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:37:40 crc kubenswrapper[4792]: E1202 18:37:40.538938 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.539182 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:37:40 crc kubenswrapper[4792]: E1202 18:37:40.539226 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.539342 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:37:40 crc kubenswrapper[4792]: E1202 18:37:40.539384 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.597145 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.597179 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.597205 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.597221 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.597229 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:40Z","lastTransitionTime":"2025-12-02T18:37:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.699893 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.699941 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.699950 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.699965 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.699978 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:40Z","lastTransitionTime":"2025-12-02T18:37:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.802487 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.802561 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.802574 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.802589 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.802602 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:40Z","lastTransitionTime":"2025-12-02T18:37:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.904696 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.904744 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.904754 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.904769 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:40 crc kubenswrapper[4792]: I1202 18:37:40.904780 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:40Z","lastTransitionTime":"2025-12-02T18:37:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.007566 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.007640 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.007657 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.007680 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.007698 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:41Z","lastTransitionTime":"2025-12-02T18:37:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.111294 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.111372 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.111395 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.111431 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.111451 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:41Z","lastTransitionTime":"2025-12-02T18:37:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.215426 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.215513 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.215570 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.215604 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.215705 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:41Z","lastTransitionTime":"2025-12-02T18:37:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.318358 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.318416 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.318429 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.318447 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.318460 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:41Z","lastTransitionTime":"2025-12-02T18:37:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.422438 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.422515 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.422570 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.422600 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.422619 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:41Z","lastTransitionTime":"2025-12-02T18:37:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.525851 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.525911 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.525928 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.525950 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.525970 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:41Z","lastTransitionTime":"2025-12-02T18:37:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.539778 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:37:41 crc kubenswrapper[4792]: E1202 18:37:41.540073 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52" Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.629276 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.629345 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.629362 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.629388 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.629407 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:41Z","lastTransitionTime":"2025-12-02T18:37:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.732809 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.732887 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.732907 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.732940 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.732960 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:41Z","lastTransitionTime":"2025-12-02T18:37:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.836070 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.836108 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.836116 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.836129 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.836140 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:41Z","lastTransitionTime":"2025-12-02T18:37:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.940092 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.940171 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.940193 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.940222 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:41 crc kubenswrapper[4792]: I1202 18:37:41.940240 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:41Z","lastTransitionTime":"2025-12-02T18:37:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.044399 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.044480 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.044501 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.044560 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.044586 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:42Z","lastTransitionTime":"2025-12-02T18:37:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.147858 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.147919 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.147939 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.147962 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.147981 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:42Z","lastTransitionTime":"2025-12-02T18:37:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.251228 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.251314 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.252075 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.252196 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.252286 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:42Z","lastTransitionTime":"2025-12-02T18:37:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.355376 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.355458 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.355479 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.355510 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.355562 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:42Z","lastTransitionTime":"2025-12-02T18:37:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.458289 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.458352 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.458374 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.458407 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.458429 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:42Z","lastTransitionTime":"2025-12-02T18:37:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.488125 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.488202 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.488226 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.488262 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.488288 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:42Z","lastTransitionTime":"2025-12-02T18:37:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:42 crc kubenswrapper[4792]: E1202 18:37:42.503710 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1ed8f756-400f-4462-b5a1-c3a97e79306e\\\",\\\"systemUUID\\\":\\\"4d48cf4e-c99d-43e6-acd7-ad269c0425b2\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:42Z is after 
2025-08-24T17:21:41Z" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.510330 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.510391 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.510401 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.510419 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.510431 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:42Z","lastTransitionTime":"2025-12-02T18:37:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:42 crc kubenswrapper[4792]: E1202 18:37:42.526866 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1ed8f756-400f-4462-b5a1-c3a97e79306e\\\",\\\"systemUUID\\\":\\\"4d48cf4e-c99d-43e6-acd7-ad269c0425b2\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:42Z is after 
2025-08-24T17:21:41Z" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.531407 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.531474 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.531483 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.531498 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.531512 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:42Z","lastTransitionTime":"2025-12-02T18:37:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.539316 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.539339 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.539362 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:37:42 crc kubenswrapper[4792]: E1202 18:37:42.539457 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:37:42 crc kubenswrapper[4792]: E1202 18:37:42.539709 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:37:42 crc kubenswrapper[4792]: E1202 18:37:42.539929 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:37:42 crc kubenswrapper[4792]: E1202 18:37:42.556204 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1ed8f756-400f-4462-b5a1-c3a97e79306e\\\",\\\"systemUUID\\\":\\\"4d48cf4e-c99d-43e6-acd7-ad269c0425b2\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:42Z is after 
2025-08-24T17:21:41Z" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.561439 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.561553 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.561582 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.561614 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.561641 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:42Z","lastTransitionTime":"2025-12-02T18:37:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:42 crc kubenswrapper[4792]: E1202 18:37:42.577878 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1ed8f756-400f-4462-b5a1-c3a97e79306e\\\",\\\"systemUUID\\\":\\\"4d48cf4e-c99d-43e6-acd7-ad269c0425b2\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:42Z is after 
2025-08-24T17:21:41Z" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.582657 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.582728 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.582751 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.582782 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.582866 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:42Z","lastTransitionTime":"2025-12-02T18:37:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:42 crc kubenswrapper[4792]: E1202 18:37:42.603844 4792 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T18:37:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1ed8f756-400f-4462-b5a1-c3a97e79306e\\\",\\\"systemUUID\\\":\\\"4d48cf4e-c99d-43e6-acd7-ad269c0425b2\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:42Z is after 
2025-08-24T17:21:41Z" Dec 02 18:37:42 crc kubenswrapper[4792]: E1202 18:37:42.603976 4792 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.605407 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.605467 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.605486 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.605510 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.605557 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:42Z","lastTransitionTime":"2025-12-02T18:37:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.707909 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.707976 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.707993 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.708022 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.708043 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:42Z","lastTransitionTime":"2025-12-02T18:37:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.811088 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.811155 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.811172 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.811198 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.811216 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:42Z","lastTransitionTime":"2025-12-02T18:37:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.914844 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.914904 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.914921 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.914949 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:42 crc kubenswrapper[4792]: I1202 18:37:42.914969 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:42Z","lastTransitionTime":"2025-12-02T18:37:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.019115 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.019215 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.019239 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.019278 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.019303 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:43Z","lastTransitionTime":"2025-12-02T18:37:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.123219 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.123286 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.123303 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.123332 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.123354 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:43Z","lastTransitionTime":"2025-12-02T18:37:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.226208 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.226298 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.226322 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.226352 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.226377 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:43Z","lastTransitionTime":"2025-12-02T18:37:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.330141 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.330216 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.330241 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.330273 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.330296 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:43Z","lastTransitionTime":"2025-12-02T18:37:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.434009 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.434091 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.434115 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.434143 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.434161 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:43Z","lastTransitionTime":"2025-12-02T18:37:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.537875 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.537948 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.537968 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.537994 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.538012 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:43Z","lastTransitionTime":"2025-12-02T18:37:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.538976 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:37:43 crc kubenswrapper[4792]: E1202 18:37:43.539230 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52" Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.641004 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.641110 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.641161 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.641180 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.641195 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:43Z","lastTransitionTime":"2025-12-02T18:37:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.744367 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.744420 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.744433 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.744449 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.744461 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:43Z","lastTransitionTime":"2025-12-02T18:37:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.850174 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.850232 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.850244 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.850265 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.850279 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:43Z","lastTransitionTime":"2025-12-02T18:37:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.954027 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.954098 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.954110 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.954130 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:43 crc kubenswrapper[4792]: I1202 18:37:43.954143 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:43Z","lastTransitionTime":"2025-12-02T18:37:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.056549 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.056620 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.056638 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.056670 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.056693 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:44Z","lastTransitionTime":"2025-12-02T18:37:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.159234 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.159308 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.159327 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.159395 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.159415 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:44Z","lastTransitionTime":"2025-12-02T18:37:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.262412 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.262465 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.262484 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.262511 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.262567 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:44Z","lastTransitionTime":"2025-12-02T18:37:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.366154 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.366243 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.366260 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.366285 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.366307 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:44Z","lastTransitionTime":"2025-12-02T18:37:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.469007 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.469055 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.469074 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.469094 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.469106 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:44Z","lastTransitionTime":"2025-12-02T18:37:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.539034 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.539132 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.539061 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:37:44 crc kubenswrapper[4792]: E1202 18:37:44.539266 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:37:44 crc kubenswrapper[4792]: E1202 18:37:44.539422 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:37:44 crc kubenswrapper[4792]: E1202 18:37:44.539609 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.572062 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.572114 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.572130 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.572150 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.572165 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:44Z","lastTransitionTime":"2025-12-02T18:37:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.675245 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.675331 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.675357 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.675387 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.675413 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:44Z","lastTransitionTime":"2025-12-02T18:37:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.779298 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.779367 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.779388 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.779418 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.779437 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:44Z","lastTransitionTime":"2025-12-02T18:37:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.882881 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.882934 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.882946 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.882965 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.882977 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:44Z","lastTransitionTime":"2025-12-02T18:37:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.986589 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.986653 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.986673 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.986699 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:44 crc kubenswrapper[4792]: I1202 18:37:44.986719 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:44Z","lastTransitionTime":"2025-12-02T18:37:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.092649 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.092788 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.092814 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.092847 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.092874 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:45Z","lastTransitionTime":"2025-12-02T18:37:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.197204 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.197268 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.197285 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.197315 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.197334 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:45Z","lastTransitionTime":"2025-12-02T18:37:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.302404 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.302464 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.302483 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.302511 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.302561 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:45Z","lastTransitionTime":"2025-12-02T18:37:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.405978 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.406049 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.406068 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.406095 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.406113 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:45Z","lastTransitionTime":"2025-12-02T18:37:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.510021 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.510098 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.510116 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.510142 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.510159 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:45Z","lastTransitionTime":"2025-12-02T18:37:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.539415 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:37:45 crc kubenswrapper[4792]: E1202 18:37:45.539640 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52" Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.613762 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.613828 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.613847 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.613871 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.613903 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:45Z","lastTransitionTime":"2025-12-02T18:37:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.717432 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.717500 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.717517 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.717684 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.717707 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:45Z","lastTransitionTime":"2025-12-02T18:37:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.821079 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.821132 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.821151 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.821176 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.821199 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:45Z","lastTransitionTime":"2025-12-02T18:37:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.924329 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.924425 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.924454 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.924487 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:45 crc kubenswrapper[4792]: I1202 18:37:45.924510 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:45Z","lastTransitionTime":"2025-12-02T18:37:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.028353 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.028438 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.028456 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.028490 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.028513 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:46Z","lastTransitionTime":"2025-12-02T18:37:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.131360 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.131444 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.131469 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.131498 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.131564 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:46Z","lastTransitionTime":"2025-12-02T18:37:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.235358 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.235424 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.235443 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.235468 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.235488 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:46Z","lastTransitionTime":"2025-12-02T18:37:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.338932 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.338995 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.339013 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.339037 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.339056 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:46Z","lastTransitionTime":"2025-12-02T18:37:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.442190 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.442262 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.442280 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.442307 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.442327 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:46Z","lastTransitionTime":"2025-12-02T18:37:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.539461 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.539571 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.539834 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:37:46 crc kubenswrapper[4792]: E1202 18:37:46.540040 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:37:46 crc kubenswrapper[4792]: E1202 18:37:46.540190 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:37:46 crc kubenswrapper[4792]: E1202 18:37:46.540377 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.546662 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.546760 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.546783 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.546815 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.546837 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:46Z","lastTransitionTime":"2025-12-02T18:37:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.650297 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.650380 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.650399 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.650428 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.650455 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:46Z","lastTransitionTime":"2025-12-02T18:37:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.754283 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.754385 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.754404 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.754433 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.754451 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:46Z","lastTransitionTime":"2025-12-02T18:37:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.858001 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.858063 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.858081 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.858104 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.858123 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:46Z","lastTransitionTime":"2025-12-02T18:37:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.962440 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.962496 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.962513 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.962578 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:46 crc kubenswrapper[4792]: I1202 18:37:46.962618 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:46Z","lastTransitionTime":"2025-12-02T18:37:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.066128 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.066190 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.066211 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.066243 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.066267 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:47Z","lastTransitionTime":"2025-12-02T18:37:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.169481 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.169576 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.169594 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.169618 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.169637 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:47Z","lastTransitionTime":"2025-12-02T18:37:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.272923 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.273006 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.273030 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.273066 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.273092 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:47Z","lastTransitionTime":"2025-12-02T18:37:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.376230 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.376808 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.377001 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.377259 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.377493 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:47Z","lastTransitionTime":"2025-12-02T18:37:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.480364 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.480426 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.480440 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.480462 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.480477 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:47Z","lastTransitionTime":"2025-12-02T18:37:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.539255 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:37:47 crc kubenswrapper[4792]: E1202 18:37:47.540099 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52" Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.582846 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.582913 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.582932 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.582956 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.582976 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:47Z","lastTransitionTime":"2025-12-02T18:37:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.648675 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/778806a7-7e6f-4776-8233-b42b296ebc52-metrics-certs\") pod \"network-metrics-daemon-2ls4m\" (UID: \"778806a7-7e6f-4776-8233-b42b296ebc52\") " pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:37:47 crc kubenswrapper[4792]: E1202 18:37:47.648862 4792 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 18:37:47 crc kubenswrapper[4792]: E1202 18:37:47.648964 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/778806a7-7e6f-4776-8233-b42b296ebc52-metrics-certs podName:778806a7-7e6f-4776-8233-b42b296ebc52 nodeName:}" failed. No retries permitted until 2025-12-02 18:38:51.648944015 +0000 UTC m=+162.421836343 (durationBeforeRetry 1m4s). 
Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.685813 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.686311 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.686457 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.686639 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.686855 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:47Z","lastTransitionTime":"2025-12-02T18:37:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.790271 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.790348 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.790372 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.790411 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.790438 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:47Z","lastTransitionTime":"2025-12-02T18:37:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Has your network provider started?"} Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.893165 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.893269 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.893293 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.893332 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.893353 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:47Z","lastTransitionTime":"2025-12-02T18:37:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.997303 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.997358 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.997374 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.997396 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:47 crc kubenswrapper[4792]: I1202 18:37:47.997413 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:47Z","lastTransitionTime":"2025-12-02T18:37:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.100866 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.100964 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.100988 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.101023 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.101053 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:48Z","lastTransitionTime":"2025-12-02T18:37:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.205199 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.205257 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.205266 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.205282 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.205293 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:48Z","lastTransitionTime":"2025-12-02T18:37:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.308022 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.308094 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.308108 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.308132 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.308148 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:48Z","lastTransitionTime":"2025-12-02T18:37:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.410852 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.410914 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.410930 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.410952 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.410969 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:48Z","lastTransitionTime":"2025-12-02T18:37:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.514146 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.514196 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.514210 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.514232 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.514249 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:48Z","lastTransitionTime":"2025-12-02T18:37:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.538734 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.538780 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.538811 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:37:48 crc kubenswrapper[4792]: E1202 18:37:48.539392 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:37:48 crc kubenswrapper[4792]: E1202 18:37:48.539578 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:37:48 crc kubenswrapper[4792]: E1202 18:37:48.539733 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.617356 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.617412 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.617427 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.617450 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.617471 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:48Z","lastTransitionTime":"2025-12-02T18:37:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.721370 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.721477 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.721502 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.721573 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.721598 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:48Z","lastTransitionTime":"2025-12-02T18:37:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.824387 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.824465 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.824488 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.824565 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.824591 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:48Z","lastTransitionTime":"2025-12-02T18:37:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.927693 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.927751 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.927762 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.927781 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:48 crc kubenswrapper[4792]: I1202 18:37:48.927793 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:48Z","lastTransitionTime":"2025-12-02T18:37:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.030662 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.030707 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.030718 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.030733 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.030743 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:49Z","lastTransitionTime":"2025-12-02T18:37:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.133721 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.133789 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.133805 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.133834 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.133852 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:49Z","lastTransitionTime":"2025-12-02T18:37:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.237165 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.237245 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.237268 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.237300 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.237324 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:49Z","lastTransitionTime":"2025-12-02T18:37:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.341065 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.341143 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.341161 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.341181 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.341218 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:49Z","lastTransitionTime":"2025-12-02T18:37:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.444144 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.444189 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.444201 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.444218 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.444231 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:49Z","lastTransitionTime":"2025-12-02T18:37:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.539153 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:37:49 crc kubenswrapper[4792]: E1202 18:37:49.539626 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.539941 4792 scope.go:117] "RemoveContainer" containerID="c149ca0a128d1e33a8df95c177c80ba3a8dbb2caf877124ba7e10ff195808ad3" Dec 02 18:37:49 crc kubenswrapper[4792]: E1202 18:37:49.540109 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-4jhb5_openshift-ovn-kubernetes(2f79c130-fb71-4e1c-9e2d-ef492a0acb04)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.547774 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.547849 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.547867 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.547891 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.547911 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:49Z","lastTransitionTime":"2025-12-02T18:37:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady 
Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.559122 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l66ss" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69235e1-c1ab-41e3-af2c-14b956c6c37b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddaff8d1d17ef199fd8e33d481a528de305f885915e7db4033dc0e64a94f669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ad7ba54dd04f14f1833b1aa12ddfd599553966eca78916b9c527ecf7c3b6fff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d11c01fce5b7dd42f3fcb23543d14d0456d624453f5322ec0349292d41ffcb63\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae480e401151f9e5a9c0fdc682bc5c1103901fe75206e49f208cc1e47ffa7648\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6316d60cecb7a4597fbbe0f7eac39c1de37e734716f6132a65c9efaa5fa43ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volum
eMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://740346cd0da412829f13b5f2b9e05a0d504b5e467c0d6d6fac992f3eaaa5f25b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b14345c1bec111318cfbd8b89d93380476c3a807fcd3cd8e86fd8eaf9d8930d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9ztfg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l66ss\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:49Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:49 crc kubenswrapper[4792]: 
I1202 18:37:49.573260 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dw25w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6925e194-2dc8-4a3a-aa76-8db41ff27997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58258d39cb6d0616429a94e2f3542d2250c8e8a89d393cf5667aa0b877d03797\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9e65c94ca8016f1dfcde2fc1262f2f78f150ada4796f7f2607221921bb3b805\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T18:37:15Z\\\",\\\"message\\\":\\\"2025-12-02T18:36:30+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_b44dcf89-3d61-4fbd-8ed1-5b2337cb0497\\\\n2025-12-02T18:36:30+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_b44dcf89-3d61-4fbd-8ed1-5b2337cb0497 to /host/opt/cni/bin/\\\\n2025-12-02T18:36:30Z [verbose] multus-daemon started\\\\n2025-12-02T18:36:30Z [verbose] Readiness Indicator file check\\\\n2025-12-02T18:37:15Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:37:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tjt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dw25w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:49Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.584810 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qrlwg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"85a6200e-64b5-4e6b-bd19-933e3b576bfa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3415f5b847f72aad5d88625823e8241334b103ab38a6450f9118b46058e96b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jt4k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:31Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qrlwg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:49Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.597315 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-2ls4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"778806a7-7e6f-4776-8233-b42b296ebc52\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcjpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcjpr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-2ls4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:49Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.632800 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"99e48094-7461-4e02-9b8d-cf968bd0df50\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e51d1dd753c543a66e8121c8221b153bb20949a62b1a227cc9323a48d94e434\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9a0bc12852e3244f38a9854aedc0f74d086ae9dee69dd4d6cd773f743ad7cdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://21a864cca8df919277e92dddd034772dd25991a778cb435aa9977983f19595d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://116adfaaadbeb48f2632afac2d1678cf3833bbb
eea42356d477d7ae205aa621a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://937c26cbc0b38b347cdaf65a9e03413bde8eebda56f33830dd20f41513004111\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd1f3a58e7ef15402b3c0ce62ee49f54b9973801e4df0e3699c2c2ec5f78342\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5fd1f3a58e7ef15402b3c0ce62ee49f54b9973801e4df0e3699c2c2ec5f78342\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a0b2870ef699525b38d94f21951da305874af4208a990da2ada7c0332866d36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a0b2870ef699525b38d94f21951da305874af4208a990da2ada7c0332866d36\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://95d756b6ee6fb88c93512a94280783b3558cb8ff1fcc253200706cb74e08870d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95d756b6ee6fb88c93512a94280783b3558cb8ff1fcc253200706cb74e08870d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:49Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.651772 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l7jxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"da09a4ee-4e22-4396-a352-7bcb2b89db73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e9641526f236df1982b46cbfc0583af3bc551c9ddff5bdf2006a5d6c075307\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nqc7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\
\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l7jxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:49Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.651826 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.652114 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.652153 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.652189 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.652209 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:49Z","lastTransitionTime":"2025-12-02T18:37:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.675122 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nvcwt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2adea4f-f2e8-4311-8d3b-e720e68530eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f39043dbb211b12cfe20e988fa0b943cefec51e7d2927e94d1cbb9b56c999038\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgfcx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93a7fa91b532301637883d482e698574ab677db991d76698cffefb28f4e8b9b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bgfcx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nvcwt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:49Z is after 2025-08-24T17:21:41Z" Dec 02 
18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.697770 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9668059a-1772-400b-b2ad-86aa3d306dd9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2153e3d5a1b277988329aaab097b2fdb9955c4d79382b08ff93827ad69b629a1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T18:36:27Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 18:36:22.337074 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 18:36:22.338216 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179137523/tls.crt::/tmp/serving-cert-2179137523/tls.key\\\\\\\"\\\\nI1202 18:36:27.868924 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 18:36:27.874780 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 18:36:27.874832 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 18:36:27.874861 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 18:36:27.874872 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 18:36:27.886080 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1202 18:36:27.886174 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1202 18:36:27.886187 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886221 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 18:36:27.886240 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 18:36:27.886250 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 18:36:27.886257 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 18:36:27.886264 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1202 18:36:27.889226 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:49Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.713568 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:49Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.730563 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5777c8afe286becbd127ff2f1cc7617adff9e95a8e4061f433134bf749fa89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:49Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.750708 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8537808b344a3fb20a07a25558069d571f132b5a1004781b9efc0ed4d76de64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb24f25d6ddc35bcd113c137145f60606cbe656c7561eab446a5d8f3b9f5c8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:49Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.755705 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.755814 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.755841 4792 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.755883 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.755909 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:49Z","lastTransitionTime":"2025-12-02T18:37:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.767631 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67aab251-4b2c-4486-bb99-93f0345fe2ce\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0dabf551bfcd95fbdaf2d0159d4f9f87164313d9d7646303805a4be08a470b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://387ae2df2ebffc4d68366fe6c589499cd68be2abdc6d24ee942a103f4d884c3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://387ae2df2ebffc4d68366fe6c589499cd68be2abdc6d24ee942a103f4d884c3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\
\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:49Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.789677 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:49Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.813336 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:49Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.833264 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63bfb707ccc4df1bf37c121b2c44c2e74bbd4c0ba5daec8a6b5e9b0d91c0f6d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:49Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.848173 4792 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc533e72d865a8b2ece2e1b3ac15c56f873c2370c482ec0d1b683b343733cb06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8j2px\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-wpdh4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:49Z is after 2025-08-24T17:21:41Z" Dec 02 
18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.865077 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.865151 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.865165 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.865188 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.865203 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:49Z","lastTransitionTime":"2025-12-02T18:37:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.877078 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c149ca0a128d1e33a8df95c177c80ba3a8dbb2ca
f877124ba7e10ff195808ad3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c149ca0a128d1e33a8df95c177c80ba3a8dbb2caf877124ba7e10ff195808ad3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T18:37:19Z\\\",\\\"message\\\":\\\" ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc006ec7d17 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https-metrics,Protocol:TCP,Port:8443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: catalog-operator,},ClusterIP:10.217.5.204,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.204],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1202 18:37:18.318153 6766 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1202 18:37:18.318284 6766 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1202 18:37:18.318136 6766 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machin\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T18:37:17Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-4jhb5_openshift-ovn-kubernetes(2f79c130-fb71-4e1c-9e2d-ef492a0acb04)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vd5d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4jhb5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:49Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.894708 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d1917fa-6107-4248-a5a8-a868f9db2814\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2807cb4941b1acf09fa726b8a0232569ca9fb776560cd706c107f81cde5e9129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a21456483c25fc668cfcd2381d338c536965fb207de5645c820d4a26ffa866\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab788711c9d9f79bfae65616b47587a6108a2c981566af27e1eaa865083df46d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:49Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.909458 4792 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d6409c0d-877b-444d-a794-4ab8255b9099\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:37:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T18:36:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62cbd6fd0a3a47c0baa3bc776b5a2e2ba638de1954d203b7111ec77f3106d32e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a81a880a60fc5626604b7c76035a053fccf22cd190fafdda123fe503d891a21\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d8ef45a3f94d51a7c9260ccd53fa15a9790303e7f7de95a1b166384fca992b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T18:36:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://766ea0daaf38f994a9e8ec7d4e8c872f667ddb4e651aa3955cc08139be85842d\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://766ea0daaf38f994a9e8ec7d4e8c872f667ddb4e651aa3955cc08139be85842d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T18:36:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T18:36:10Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T18:36:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T18:37:49Z is after 2025-08-24T17:21:41Z" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.968936 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.968994 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.969008 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.969028 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:49 crc kubenswrapper[4792]: I1202 18:37:49.969042 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:49Z","lastTransitionTime":"2025-12-02T18:37:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.072413 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.072473 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.072484 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.072502 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.072514 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:50Z","lastTransitionTime":"2025-12-02T18:37:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.175788 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.175826 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.175837 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.175857 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.175871 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:50Z","lastTransitionTime":"2025-12-02T18:37:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.279139 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.279182 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.279191 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.279207 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.279217 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:50Z","lastTransitionTime":"2025-12-02T18:37:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.382976 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.383047 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.383070 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.383099 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.383121 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:50Z","lastTransitionTime":"2025-12-02T18:37:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.487006 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.487404 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.487496 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.487630 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.487718 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:50Z","lastTransitionTime":"2025-12-02T18:37:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.539409 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.539612 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.539687 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:37:50 crc kubenswrapper[4792]: E1202 18:37:50.539817 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:37:50 crc kubenswrapper[4792]: E1202 18:37:50.540077 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:37:50 crc kubenswrapper[4792]: E1202 18:37:50.540149 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.590567 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.590622 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.590641 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.590667 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.590686 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:50Z","lastTransitionTime":"2025-12-02T18:37:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.693679 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.693757 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.693779 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.693806 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.693830 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:50Z","lastTransitionTime":"2025-12-02T18:37:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.797141 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.797485 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.797717 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.797884 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.798007 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:50Z","lastTransitionTime":"2025-12-02T18:37:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.901178 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.901263 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.901281 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.901309 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:50 crc kubenswrapper[4792]: I1202 18:37:50.901327 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:50Z","lastTransitionTime":"2025-12-02T18:37:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.010098 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.010173 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.010191 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.010219 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.010282 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:51Z","lastTransitionTime":"2025-12-02T18:37:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.113471 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.113533 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.113546 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.113565 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.113575 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:51Z","lastTransitionTime":"2025-12-02T18:37:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.217716 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.217784 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.217803 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.217831 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.217853 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:51Z","lastTransitionTime":"2025-12-02T18:37:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.320632 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.320710 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.320727 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.320754 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.320774 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:51Z","lastTransitionTime":"2025-12-02T18:37:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.424402 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.424454 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.424463 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.424486 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.424500 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:51Z","lastTransitionTime":"2025-12-02T18:37:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.527361 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.527435 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.527455 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.527479 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.527497 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:51Z","lastTransitionTime":"2025-12-02T18:37:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.538933 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:37:51 crc kubenswrapper[4792]: E1202 18:37:51.539147 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52" Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.632250 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.632302 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.632320 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.632344 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.632361 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:51Z","lastTransitionTime":"2025-12-02T18:37:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.735497 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.735622 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.735643 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.735670 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.735690 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:51Z","lastTransitionTime":"2025-12-02T18:37:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.839661 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.839755 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.839776 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.839807 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.839825 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:51Z","lastTransitionTime":"2025-12-02T18:37:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.943095 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.943212 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.943236 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.943266 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:51 crc kubenswrapper[4792]: I1202 18:37:51.943283 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:51Z","lastTransitionTime":"2025-12-02T18:37:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.047403 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.047474 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.047495 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.047549 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.047574 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:52Z","lastTransitionTime":"2025-12-02T18:37:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.151940 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.152002 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.152021 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.152067 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.152086 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:52Z","lastTransitionTime":"2025-12-02T18:37:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.256892 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.256943 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.256957 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.256979 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.256993 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:52Z","lastTransitionTime":"2025-12-02T18:37:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.360649 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.360753 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.360777 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.360803 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.360857 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:52Z","lastTransitionTime":"2025-12-02T18:37:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.469255 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.469337 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.469356 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.469385 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.469405 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:52Z","lastTransitionTime":"2025-12-02T18:37:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.539312 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.539373 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.539407 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 18:37:52 crc kubenswrapper[4792]: E1202 18:37:52.539509 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 18:37:52 crc kubenswrapper[4792]: E1202 18:37:52.539642 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 18:37:52 crc kubenswrapper[4792]: E1202 18:37:52.539902 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.572634 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.572698 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.572721 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.572749 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.572773 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:52Z","lastTransitionTime":"2025-12-02T18:37:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.675882 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.675913 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.675923 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.675937 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.675946 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:52Z","lastTransitionTime":"2025-12-02T18:37:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.778981 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.779015 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.779023 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.779036 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.779044 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:52Z","lastTransitionTime":"2025-12-02T18:37:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.882284 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.882334 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.882344 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.882361 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.882371 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:52Z","lastTransitionTime":"2025-12-02T18:37:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.957705 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.957761 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.957775 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.957794 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 18:37:52 crc kubenswrapper[4792]: I1202 18:37:52.957808 4792 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T18:37:52Z","lastTransitionTime":"2025-12-02T18:37:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 18:37:53 crc kubenswrapper[4792]: I1202 18:37:53.025991 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-blk8k"]
Dec 02 18:37:53 crc kubenswrapper[4792]: I1202 18:37:53.026489 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-blk8k"
Dec 02 18:37:53 crc kubenswrapper[4792]: I1202 18:37:53.028881 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert"
Dec 02 18:37:53 crc kubenswrapper[4792]: I1202 18:37:53.029027 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt"
Dec 02 18:37:53 crc kubenswrapper[4792]: I1202 18:37:53.029070 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4"
Dec 02 18:37:53 crc kubenswrapper[4792]: I1202 18:37:53.029091 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt"
Dec 02 18:37:53 crc kubenswrapper[4792]: I1202 18:37:53.074896 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-l66ss" podStartSLOduration=85.074849674 podStartE2EDuration="1m25.074849674s" podCreationTimestamp="2025-12-02 18:36:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:37:53.052123604 +0000 UTC m=+103.825015942" watchObservedRunningTime="2025-12-02 18:37:53.074849674 +0000 UTC m=+103.847742032"
Dec 02 18:37:53 crc kubenswrapper[4792]: I1202 18:37:53.083073 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-dw25w" podStartSLOduration=85.083060811 podStartE2EDuration="1m25.083060811s" podCreationTimestamp="2025-12-02 18:36:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:37:53.072926064 +0000 UTC m=+103.845818392" watchObservedRunningTime="2025-12-02 18:37:53.083060811 +0000 UTC m=+103.855953169"
Dec 02 18:37:53 crc kubenswrapper[4792]: I1202 18:37:53.093258 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-qrlwg" podStartSLOduration=85.09323257 podStartE2EDuration="1m25.09323257s" podCreationTimestamp="2025-12-02 18:36:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:37:53.083900074 +0000 UTC m=+103.856792402" watchObservedRunningTime="2025-12-02 18:37:53.09323257 +0000 UTC m=+103.866124898"
Dec 02 18:37:53 crc kubenswrapper[4792]: I1202 18:37:53.106483 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/dcab3713-675f-45ef-a374-e18ef801b76b-service-ca\") pod \"cluster-version-operator-5c965bbfc6-blk8k\" (UID: \"dcab3713-675f-45ef-a374-e18ef801b76b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-blk8k"
Dec 02 18:37:53 crc kubenswrapper[4792]: I1202 18:37:53.106565 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/dcab3713-675f-45ef-a374-e18ef801b76b-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-blk8k\" (UID: \"dcab3713-675f-45ef-a374-e18ef801b76b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-blk8k"
Dec 02 18:37:53 crc kubenswrapper[4792]: I1202 18:37:53.106586 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dcab3713-675f-45ef-a374-e18ef801b76b-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-blk8k\" (UID: \"dcab3713-675f-45ef-a374-e18ef801b76b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-blk8k"
Dec 02 18:37:53 crc kubenswrapper[4792]: I1202 18:37:53.106622 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dcab3713-675f-45ef-a374-e18ef801b76b-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-blk8k\" (UID: \"dcab3713-675f-45ef-a374-e18ef801b76b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-blk8k"
Dec 02 18:37:53 crc kubenswrapper[4792]: I1202 18:37:53.106659 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/dcab3713-675f-45ef-a374-e18ef801b76b-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-blk8k\" (UID: \"dcab3713-675f-45ef-a374-e18ef801b76b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-blk8k"
Dec 02 18:37:53 crc kubenswrapper[4792]: I1202 18:37:53.118763 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=23.118738184 podStartE2EDuration="23.118738184s" podCreationTimestamp="2025-12-02 18:37:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:37:53.118334373 +0000 UTC m=+103.891226701" watchObservedRunningTime="2025-12-02 18:37:53.118738184 +0000 UTC m=+103.891630512"
Dec 02 18:37:53 crc kubenswrapper[4792]: I1202 18:37:53.128945 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-l7jxh" podStartSLOduration=85.128932203 podStartE2EDuration="1m25.128932203s" podCreationTimestamp="2025-12-02 18:36:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:37:53.1284121 +0000 UTC m=+103.901304428" watchObservedRunningTime="2025-12-02 18:37:53.128932203 +0000 UTC m=+103.901824531"
Dec 02 18:37:53 crc kubenswrapper[4792]: I1202 18:37:53.149096 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nvcwt" podStartSLOduration=84.149078016 podStartE2EDuration="1m24.149078016s" podCreationTimestamp="2025-12-02 18:36:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:37:53.142750638 +0000 UTC m=+103.915642966" watchObservedRunningTime="2025-12-02 18:37:53.149078016 +0000 UTC m=+103.921970344"
Dec 02 18:37:53 crc kubenswrapper[4792]: I1202 18:37:53.180838 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=85.180822234 podStartE2EDuration="1m25.180822234s" podCreationTimestamp="2025-12-02 18:36:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:37:53.180362692 +0000 UTC m=+103.953255050" watchObservedRunningTime="2025-12-02 18:37:53.180822234 +0000 UTC m=+103.953714562"
Dec 02 18:37:53 crc kubenswrapper[4792]: I1202 18:37:53.207752 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/dcab3713-675f-45ef-a374-e18ef801b76b-service-ca\") pod \"cluster-version-operator-5c965bbfc6-blk8k\" (UID: \"dcab3713-675f-45ef-a374-e18ef801b76b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-blk8k"
Dec 02 18:37:53 crc kubenswrapper[4792]: I1202 18:37:53.207834 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dcab3713-675f-45ef-a374-e18ef801b76b-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-blk8k\" (UID: \"dcab3713-675f-45ef-a374-e18ef801b76b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-blk8k"
Dec 02 18:37:53 crc kubenswrapper[4792]: I1202 18:37:53.207869 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/dcab3713-675f-45ef-a374-e18ef801b76b-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-blk8k\" (UID: \"dcab3713-675f-45ef-a374-e18ef801b76b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-blk8k"
Dec 02 18:37:53 crc kubenswrapper[4792]: I1202 18:37:53.207897 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dcab3713-675f-45ef-a374-e18ef801b76b-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-blk8k\" (UID: \"dcab3713-675f-45ef-a374-e18ef801b76b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-blk8k"
Dec 02 18:37:53 crc kubenswrapper[4792]: I1202 18:37:53.207927 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/dcab3713-675f-45ef-a374-e18ef801b76b-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-blk8k\" (UID: \"dcab3713-675f-45ef-a374-e18ef801b76b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-blk8k"
Dec 02 18:37:53 crc kubenswrapper[4792]: I1202 18:37:53.208017 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/dcab3713-675f-45ef-a374-e18ef801b76b-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-blk8k\" (UID: \"dcab3713-675f-45ef-a374-e18ef801b76b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-blk8k"
Dec 02 18:37:53 crc kubenswrapper[4792]: I1202 18:37:53.208115 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/dcab3713-675f-45ef-a374-e18ef801b76b-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-blk8k\" (UID: \"dcab3713-675f-45ef-a374-e18ef801b76b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-blk8k"
Dec 02 18:37:53 crc kubenswrapper[4792]: I1202 18:37:53.209104 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/dcab3713-675f-45ef-a374-e18ef801b76b-service-ca\") pod \"cluster-version-operator-5c965bbfc6-blk8k\" (UID: \"dcab3713-675f-45ef-a374-e18ef801b76b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-blk8k"
Dec 02 18:37:53 crc kubenswrapper[4792]: I1202 18:37:53.225494 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dcab3713-675f-45ef-a374-e18ef801b76b-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-blk8k\" (UID: \"dcab3713-675f-45ef-a374-e18ef801b76b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-blk8k"
Dec 02 18:37:53 crc kubenswrapper[4792]: I1202 18:37:53.238452 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dcab3713-675f-45ef-a374-e18ef801b76b-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-blk8k\" (UID: \"dcab3713-675f-45ef-a374-e18ef801b76b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-blk8k"
Dec 02 18:37:53 crc kubenswrapper[4792]: I1202 18:37:53.275889 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=24.275868265 podStartE2EDuration="24.275868265s" podCreationTimestamp="2025-12-02 18:37:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:37:53.274978222 +0000 UTC m=+104.047870550" watchObservedRunningTime="2025-12-02 18:37:53.275868265 +0000 UTC m=+104.048760593"
Dec 02 18:37:53 crc kubenswrapper[4792]: I1202 18:37:53.327199 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podStartSLOduration=85.32715933 podStartE2EDuration="1m25.32715933s" podCreationTimestamp="2025-12-02 18:36:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:37:53.326953485 +0000 UTC m=+104.099845823" watchObservedRunningTime="2025-12-02 18:37:53.32715933 +0000 UTC m=+104.100051658"
Dec 02 18:37:53 crc kubenswrapper[4792]: I1202 18:37:53.346039 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-blk8k"
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-blk8k" Dec 02 18:37:53 crc kubenswrapper[4792]: I1202 18:37:53.406130 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=86.406108206 podStartE2EDuration="1m26.406108206s" podCreationTimestamp="2025-12-02 18:36:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:37:53.402435609 +0000 UTC m=+104.175327947" watchObservedRunningTime="2025-12-02 18:37:53.406108206 +0000 UTC m=+104.179000524" Dec 02 18:37:53 crc kubenswrapper[4792]: I1202 18:37:53.423786 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=52.423756812 podStartE2EDuration="52.423756812s" podCreationTimestamp="2025-12-02 18:37:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:37:53.420892417 +0000 UTC m=+104.193785445" watchObservedRunningTime="2025-12-02 18:37:53.423756812 +0000 UTC m=+104.196649160" Dec 02 18:37:53 crc kubenswrapper[4792]: I1202 18:37:53.539008 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:37:53 crc kubenswrapper[4792]: E1202 18:37:53.539220 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52" Dec 02 18:37:54 crc kubenswrapper[4792]: I1202 18:37:54.186832 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-blk8k" event={"ID":"dcab3713-675f-45ef-a374-e18ef801b76b","Type":"ContainerStarted","Data":"966e2b13740e836a1bc7bec78d043dac4854f111876fc1737ff7b41dfab5d579"} Dec 02 18:37:54 crc kubenswrapper[4792]: I1202 18:37:54.186926 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-blk8k" event={"ID":"dcab3713-675f-45ef-a374-e18ef801b76b","Type":"ContainerStarted","Data":"757cb1bbbd7f377a6a7be41c793c2da4b9a9ae05262e331cca609aa9f74e9efa"} Dec 02 18:37:54 crc kubenswrapper[4792]: I1202 18:37:54.538792 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:37:54 crc kubenswrapper[4792]: I1202 18:37:54.538910 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:37:54 crc kubenswrapper[4792]: I1202 18:37:54.539030 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:37:54 crc kubenswrapper[4792]: E1202 18:37:54.539066 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:37:54 crc kubenswrapper[4792]: E1202 18:37:54.539263 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:37:54 crc kubenswrapper[4792]: E1202 18:37:54.539455 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:37:55 crc kubenswrapper[4792]: I1202 18:37:55.538879 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:37:55 crc kubenswrapper[4792]: E1202 18:37:55.539089 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52" Dec 02 18:37:56 crc kubenswrapper[4792]: I1202 18:37:56.538745 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:37:56 crc kubenswrapper[4792]: I1202 18:37:56.538806 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:37:56 crc kubenswrapper[4792]: E1202 18:37:56.539014 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:37:56 crc kubenswrapper[4792]: E1202 18:37:56.539237 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:37:56 crc kubenswrapper[4792]: I1202 18:37:56.539410 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:37:56 crc kubenswrapper[4792]: E1202 18:37:56.539601 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:37:57 crc kubenswrapper[4792]: I1202 18:37:57.539953 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:37:57 crc kubenswrapper[4792]: E1202 18:37:57.540282 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52" Dec 02 18:37:58 crc kubenswrapper[4792]: I1202 18:37:58.539052 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:37:58 crc kubenswrapper[4792]: I1202 18:37:58.539117 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:37:58 crc kubenswrapper[4792]: I1202 18:37:58.539219 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:37:58 crc kubenswrapper[4792]: E1202 18:37:58.539612 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:37:58 crc kubenswrapper[4792]: E1202 18:37:58.539873 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:37:58 crc kubenswrapper[4792]: E1202 18:37:58.540589 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:37:59 crc kubenswrapper[4792]: I1202 18:37:59.539649 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:37:59 crc kubenswrapper[4792]: E1202 18:37:59.541742 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52" Dec 02 18:38:00 crc kubenswrapper[4792]: I1202 18:38:00.539514 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:38:00 crc kubenswrapper[4792]: I1202 18:38:00.539636 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:38:00 crc kubenswrapper[4792]: E1202 18:38:00.539690 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:38:00 crc kubenswrapper[4792]: E1202 18:38:00.539900 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:38:00 crc kubenswrapper[4792]: I1202 18:38:00.540066 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:38:00 crc kubenswrapper[4792]: E1202 18:38:00.540347 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:38:01 crc kubenswrapper[4792]: I1202 18:38:01.540548 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:38:01 crc kubenswrapper[4792]: E1202 18:38:01.540772 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52" Dec 02 18:38:01 crc kubenswrapper[4792]: I1202 18:38:01.541941 4792 scope.go:117] "RemoveContainer" containerID="c149ca0a128d1e33a8df95c177c80ba3a8dbb2caf877124ba7e10ff195808ad3" Dec 02 18:38:02 crc kubenswrapper[4792]: I1202 18:38:02.225587 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4jhb5_2f79c130-fb71-4e1c-9e2d-ef492a0acb04/ovnkube-controller/3.log" Dec 02 18:38:02 crc kubenswrapper[4792]: I1202 18:38:02.229464 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" event={"ID":"2f79c130-fb71-4e1c-9e2d-ef492a0acb04","Type":"ContainerStarted","Data":"1f4b2f205261a5f63018ceba56a93274528de16d5903727bc5023cceae7a5132"} Dec 02 18:38:02 crc kubenswrapper[4792]: I1202 18:38:02.229934 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" Dec 02 18:38:02 crc kubenswrapper[4792]: I1202 18:38:02.231420 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-dw25w_6925e194-2dc8-4a3a-aa76-8db41ff27997/kube-multus/1.log" Dec 02 18:38:02 crc kubenswrapper[4792]: I1202 18:38:02.231865 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-dw25w_6925e194-2dc8-4a3a-aa76-8db41ff27997/kube-multus/0.log" Dec 02 18:38:02 crc kubenswrapper[4792]: I1202 18:38:02.231910 4792 generic.go:334] "Generic (PLEG): container finished" podID="6925e194-2dc8-4a3a-aa76-8db41ff27997" containerID="58258d39cb6d0616429a94e2f3542d2250c8e8a89d393cf5667aa0b877d03797" exitCode=1 Dec 02 18:38:02 crc kubenswrapper[4792]: I1202 18:38:02.231939 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-dw25w" event={"ID":"6925e194-2dc8-4a3a-aa76-8db41ff27997","Type":"ContainerDied","Data":"58258d39cb6d0616429a94e2f3542d2250c8e8a89d393cf5667aa0b877d03797"} Dec 02 18:38:02 crc kubenswrapper[4792]: I1202 18:38:02.231971 4792 scope.go:117] "RemoveContainer" containerID="c9e65c94ca8016f1dfcde2fc1262f2f78f150ada4796f7f2607221921bb3b805" Dec 02 18:38:02 crc kubenswrapper[4792]: I1202 18:38:02.232566 4792 scope.go:117] "RemoveContainer" containerID="58258d39cb6d0616429a94e2f3542d2250c8e8a89d393cf5667aa0b877d03797" Dec 02 18:38:02 crc kubenswrapper[4792]: E1202 18:38:02.232833 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-dw25w_openshift-multus(6925e194-2dc8-4a3a-aa76-8db41ff27997)\"" pod="openshift-multus/multus-dw25w" podUID="6925e194-2dc8-4a3a-aa76-8db41ff27997" Dec 02 18:38:02 crc kubenswrapper[4792]: I1202 18:38:02.268688 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" podStartSLOduration=94.268669849 podStartE2EDuration="1m34.268669849s" podCreationTimestamp="2025-12-02 18:36:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:38:02.267746644 +0000 UTC m=+113.040639002" watchObservedRunningTime="2025-12-02 18:38:02.268669849 +0000 UTC m=+113.041562167" Dec 02 18:38:02 crc kubenswrapper[4792]: I1202 18:38:02.268904 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-blk8k" 
podStartSLOduration=94.268899685 podStartE2EDuration="1m34.268899685s" podCreationTimestamp="2025-12-02 18:36:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:37:54.213938449 +0000 UTC m=+104.986830817" watchObservedRunningTime="2025-12-02 18:38:02.268899685 +0000 UTC m=+113.041792013" Dec 02 18:38:02 crc kubenswrapper[4792]: I1202 18:38:02.480019 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-2ls4m"] Dec 02 18:38:02 crc kubenswrapper[4792]: I1202 18:38:02.480189 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:38:02 crc kubenswrapper[4792]: E1202 18:38:02.480330 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52" Dec 02 18:38:02 crc kubenswrapper[4792]: I1202 18:38:02.539182 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:38:02 crc kubenswrapper[4792]: I1202 18:38:02.539216 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:38:02 crc kubenswrapper[4792]: I1202 18:38:02.539225 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:38:02 crc kubenswrapper[4792]: E1202 18:38:02.539342 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:38:02 crc kubenswrapper[4792]: E1202 18:38:02.539571 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:38:02 crc kubenswrapper[4792]: E1202 18:38:02.539621 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:38:03 crc kubenswrapper[4792]: I1202 18:38:03.239765 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-dw25w_6925e194-2dc8-4a3a-aa76-8db41ff27997/kube-multus/1.log" Dec 02 18:38:04 crc kubenswrapper[4792]: I1202 18:38:04.539167 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:38:04 crc kubenswrapper[4792]: I1202 18:38:04.539219 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:38:04 crc kubenswrapper[4792]: E1202 18:38:04.539416 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52" Dec 02 18:38:04 crc kubenswrapper[4792]: I1202 18:38:04.539455 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:38:04 crc kubenswrapper[4792]: I1202 18:38:04.539514 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:38:04 crc kubenswrapper[4792]: E1202 18:38:04.540209 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:38:04 crc kubenswrapper[4792]: E1202 18:38:04.539828 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:38:04 crc kubenswrapper[4792]: E1202 18:38:04.539741 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:38:06 crc kubenswrapper[4792]: I1202 18:38:06.539176 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:38:06 crc kubenswrapper[4792]: I1202 18:38:06.539226 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:38:06 crc kubenswrapper[4792]: I1202 18:38:06.539177 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:38:06 crc kubenswrapper[4792]: E1202 18:38:06.540661 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:38:06 crc kubenswrapper[4792]: E1202 18:38:06.540413 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:38:06 crc kubenswrapper[4792]: I1202 18:38:06.539360 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:38:06 crc kubenswrapper[4792]: E1202 18:38:06.540867 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:38:06 crc kubenswrapper[4792]: E1202 18:38:06.540992 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52" Dec 02 18:38:08 crc kubenswrapper[4792]: I1202 18:38:08.538927 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:38:08 crc kubenswrapper[4792]: I1202 18:38:08.539008 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:38:08 crc kubenswrapper[4792]: I1202 18:38:08.539046 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:38:08 crc kubenswrapper[4792]: I1202 18:38:08.539021 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:38:08 crc kubenswrapper[4792]: E1202 18:38:08.539143 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:38:08 crc kubenswrapper[4792]: E1202 18:38:08.539306 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:38:08 crc kubenswrapper[4792]: E1202 18:38:08.539398 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52" Dec 02 18:38:08 crc kubenswrapper[4792]: E1202 18:38:08.539442 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:38:09 crc kubenswrapper[4792]: E1202 18:38:09.564846 4792 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Dec 02 18:38:09 crc kubenswrapper[4792]: E1202 18:38:09.641811 4792 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 02 18:38:10 crc kubenswrapper[4792]: I1202 18:38:10.539819 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:38:10 crc kubenswrapper[4792]: I1202 18:38:10.539873 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:38:10 crc kubenswrapper[4792]: I1202 18:38:10.539915 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:38:10 crc kubenswrapper[4792]: I1202 18:38:10.539872 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:38:10 crc kubenswrapper[4792]: E1202 18:38:10.540069 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:38:10 crc kubenswrapper[4792]: E1202 18:38:10.540419 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:38:10 crc kubenswrapper[4792]: E1202 18:38:10.540503 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:38:10 crc kubenswrapper[4792]: E1202 18:38:10.540712 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52" Dec 02 18:38:12 crc kubenswrapper[4792]: I1202 18:38:12.539733 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:38:12 crc kubenswrapper[4792]: I1202 18:38:12.539902 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:38:12 crc kubenswrapper[4792]: E1202 18:38:12.539975 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52" Dec 02 18:38:12 crc kubenswrapper[4792]: I1202 18:38:12.540100 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:38:12 crc kubenswrapper[4792]: E1202 18:38:12.540205 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:38:12 crc kubenswrapper[4792]: I1202 18:38:12.540237 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:38:12 crc kubenswrapper[4792]: E1202 18:38:12.540413 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:38:12 crc kubenswrapper[4792]: E1202 18:38:12.540509 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:38:14 crc kubenswrapper[4792]: I1202 18:38:14.538774 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:38:14 crc kubenswrapper[4792]: I1202 18:38:14.538908 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:38:14 crc kubenswrapper[4792]: E1202 18:38:14.539015 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52" Dec 02 18:38:14 crc kubenswrapper[4792]: I1202 18:38:14.539046 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:38:14 crc kubenswrapper[4792]: I1202 18:38:14.539085 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:38:14 crc kubenswrapper[4792]: E1202 18:38:14.539272 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:38:14 crc kubenswrapper[4792]: E1202 18:38:14.539397 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:38:14 crc kubenswrapper[4792]: E1202 18:38:14.539515 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:38:14 crc kubenswrapper[4792]: E1202 18:38:14.643243 4792 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 02 18:38:16 crc kubenswrapper[4792]: I1202 18:38:16.538923 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:38:16 crc kubenswrapper[4792]: I1202 18:38:16.538992 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:38:16 crc kubenswrapper[4792]: E1202 18:38:16.539188 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52" Dec 02 18:38:16 crc kubenswrapper[4792]: I1202 18:38:16.539247 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:38:16 crc kubenswrapper[4792]: I1202 18:38:16.539275 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:38:16 crc kubenswrapper[4792]: E1202 18:38:16.539640 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:38:16 crc kubenswrapper[4792]: E1202 18:38:16.539871 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:38:16 crc kubenswrapper[4792]: E1202 18:38:16.539937 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:38:17 crc kubenswrapper[4792]: I1202 18:38:17.540612 4792 scope.go:117] "RemoveContainer" containerID="58258d39cb6d0616429a94e2f3542d2250c8e8a89d393cf5667aa0b877d03797" Dec 02 18:38:18 crc kubenswrapper[4792]: I1202 18:38:18.299743 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-dw25w_6925e194-2dc8-4a3a-aa76-8db41ff27997/kube-multus/1.log" Dec 02 18:38:18 crc kubenswrapper[4792]: I1202 18:38:18.300120 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-dw25w" event={"ID":"6925e194-2dc8-4a3a-aa76-8db41ff27997","Type":"ContainerStarted","Data":"585fe554fcc83a4fa1c4bb6351183665390e1742e0bd51b90f2345444377b8c3"} Dec 02 18:38:18 crc kubenswrapper[4792]: I1202 18:38:18.539712 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:38:18 crc kubenswrapper[4792]: I1202 18:38:18.539767 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:38:18 crc kubenswrapper[4792]: I1202 18:38:18.539825 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:38:18 crc kubenswrapper[4792]: I1202 18:38:18.539922 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:38:18 crc kubenswrapper[4792]: E1202 18:38:18.540129 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 18:38:18 crc kubenswrapper[4792]: E1202 18:38:18.540279 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 18:38:18 crc kubenswrapper[4792]: E1202 18:38:18.540417 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 18:38:18 crc kubenswrapper[4792]: E1202 18:38:18.540622 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-2ls4m" podUID="778806a7-7e6f-4776-8233-b42b296ebc52" Dec 02 18:38:20 crc kubenswrapper[4792]: I1202 18:38:20.539322 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:38:20 crc kubenswrapper[4792]: I1202 18:38:20.539440 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:38:20 crc kubenswrapper[4792]: I1202 18:38:20.539454 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:38:20 crc kubenswrapper[4792]: I1202 18:38:20.539572 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:38:20 crc kubenswrapper[4792]: I1202 18:38:20.543906 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 02 18:38:20 crc kubenswrapper[4792]: I1202 18:38:20.543921 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 02 18:38:20 crc kubenswrapper[4792]: I1202 18:38:20.544013 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 02 18:38:20 crc kubenswrapper[4792]: I1202 18:38:20.544014 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 02 18:38:20 crc kubenswrapper[4792]: I1202 18:38:20.544359 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 02 18:38:20 crc kubenswrapper[4792]: I1202 18:38:20.544428 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 02 18:38:21 crc kubenswrapper[4792]: I1202 18:38:21.884250 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.656430 4792 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.706766 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-wh7rt"] Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.707600 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-wh7rt" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.707733 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-wwvbm"] Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.708816 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wwvbm" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.709314 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wnkgz"] Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.710037 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wnkgz" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.711028 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-szz8b"] Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.711403 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-szz8b" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.712509 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.712772 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-nnst7"] Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.713102 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.713164 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.713161 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.713800 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-nnst7" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.720550 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.720969 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.721174 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.721350 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.729441 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.729707 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.729747 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.729803 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.729941 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.730371 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 02 18:38:23 crc 
kubenswrapper[4792]: I1202 18:38:23.730464 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.730481 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.730679 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.731039 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.731865 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-wmll4"] Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.732428 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.732509 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-wmll4" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.732616 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.732900 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.734021 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.734245 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.734440 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.734590 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.734775 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.734791 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.734922 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.735085 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.735168 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.735100 4792 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-apiserver"/"kube-root-ca.crt" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.735628 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.736158 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.736359 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-wzsvf"] Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.737163 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-wzsvf" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.737287 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-hgblz"] Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.737889 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-hgblz" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.739614 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-fjkdx"] Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.740253 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fjkdx" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.740940 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-hcshz"] Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.741647 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-hcshz" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.742626 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-j7k6j"] Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.751057 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j7k6j" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.754654 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-nr8ml"] Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.755458 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.765662 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-jtl55"] Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.767113 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-f89mv"] Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.768273 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-f89mv" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.769060 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-jtl55" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.780825 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.781034 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.781163 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.781297 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.781392 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.781485 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.781618 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.796077 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.796201 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.796378 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.796435 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.796479 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.796610 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.796681 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.796723 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.796082 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.796810 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.796869 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.797009 4792 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.797116 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.797284 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.797452 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.797506 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.797587 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.797725 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.797756 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.797851 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.798003 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.798035 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.798064 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.798189 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.798214 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.798267 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.798092 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.798321 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.798372 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-wn9mc"] Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.798193 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.798415 4792 reflector.go:368] 
Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.798452 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.798552 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.798623 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.798683 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.798745 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.798810 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.798876 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.798919 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-nplg6"] Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.798936 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.799003 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.799128 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.799259 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.799311 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wpgk7"] Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.799966 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.800235 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-wn9mc" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.801659 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.803133 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.804892 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.805706 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-nplg6" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.806277 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.808171 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.815209 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.815728 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/62fcc2b1-4061-42e1-a3f9-15fd336c5e38-node-pullsecrets\") pod \"apiserver-76f77b778f-wh7rt\" (UID: \"62fcc2b1-4061-42e1-a3f9-15fd336c5e38\") " pod="openshift-apiserver/apiserver-76f77b778f-wh7rt" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.815775 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/62fcc2b1-4061-42e1-a3f9-15fd336c5e38-serving-cert\") pod \"apiserver-76f77b778f-wh7rt\" (UID: \"62fcc2b1-4061-42e1-a3f9-15fd336c5e38\") " pod="openshift-apiserver/apiserver-76f77b778f-wh7rt" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.815800 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/ce83fd31-f9fe-46f9-b865-cdfcfcf8e6be-profile-collector-cert\") pod \"catalog-operator-68c6474976-hcshz\" (UID: \"ce83fd31-f9fe-46f9-b865-cdfcfcf8e6be\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-hcshz" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.815821 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cg555\" (UniqueName: \"kubernetes.io/projected/04246b55-2809-4c64-abaf-9bed254d0e80-kube-api-access-cg555\") pod \"oauth-openshift-558db77b4-hgblz\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " pod="openshift-authentication/oauth-openshift-558db77b4-hgblz" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.815836 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.815852 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thhn7\" (UniqueName: 
\"kubernetes.io/projected/794efbdc-c6bb-45a1-ab12-65a4ec7dea6c-kube-api-access-thhn7\") pod \"authentication-operator-69f744f599-szz8b\" (UID: \"794efbdc-c6bb-45a1-ab12-65a4ec7dea6c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-szz8b" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.816047 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4qvtq\" (UniqueName: \"kubernetes.io/projected/62fcc2b1-4061-42e1-a3f9-15fd336c5e38-kube-api-access-4qvtq\") pod \"apiserver-76f77b778f-wh7rt\" (UID: \"62fcc2b1-4061-42e1-a3f9-15fd336c5e38\") " pod="openshift-apiserver/apiserver-76f77b778f-wh7rt" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.816086 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/62fcc2b1-4061-42e1-a3f9-15fd336c5e38-encryption-config\") pod \"apiserver-76f77b778f-wh7rt\" (UID: \"62fcc2b1-4061-42e1-a3f9-15fd336c5e38\") " pod="openshift-apiserver/apiserver-76f77b778f-wh7rt" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.816151 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-hgblz\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " pod="openshift-authentication/oauth-openshift-558db77b4-hgblz" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.816181 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/139247ce-bdc4-46c8-8acb-0120504e8855-audit-policies\") pod \"apiserver-7bbb656c7d-j7k6j\" (UID: \"139247ce-bdc4-46c8-8acb-0120504e8855\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j7k6j" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.816200 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.816216 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/794efbdc-c6bb-45a1-ab12-65a4ec7dea6c-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-szz8b\" (UID: \"794efbdc-c6bb-45a1-ab12-65a4ec7dea6c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-szz8b" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.816244 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fb8e1e39-1ba7-4d9a-b6ad-e1ba38d6abf1-config\") pod \"machine-api-operator-5694c8668f-nnst7\" (UID: \"fb8e1e39-1ba7-4d9a-b6ad-e1ba38d6abf1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nnst7" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.816272 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b019f11-935f-4956-aaf0-c6d2a5d66356-config\") pod \"route-controller-manager-6576b87f9c-fjkdx\" (UID: \"8b019f11-935f-4956-aaf0-c6d2a5d66356\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fjkdx" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 
18:38:23.816297 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/04246b55-2809-4c64-abaf-9bed254d0e80-audit-dir\") pod \"oauth-openshift-558db77b4-hgblz\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " pod="openshift-authentication/oauth-openshift-558db77b4-hgblz" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.816320 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-hgblz\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " pod="openshift-authentication/oauth-openshift-558db77b4-hgblz" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.816351 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/794efbdc-c6bb-45a1-ab12-65a4ec7dea6c-service-ca-bundle\") pod \"authentication-operator-69f744f599-szz8b\" (UID: \"794efbdc-c6bb-45a1-ab12-65a4ec7dea6c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-szz8b" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.816379 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s424v\" (UniqueName: \"kubernetes.io/projected/41739ae1-9772-4d87-8efd-6eeec54dadab-kube-api-access-s424v\") pod \"machine-approver-56656f9798-wwvbm\" (UID: \"41739ae1-9772-4d87-8efd-6eeec54dadab\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wwvbm" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.816449 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-hgblz\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " pod="openshift-authentication/oauth-openshift-558db77b4-hgblz" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.816498 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8b019f11-935f-4956-aaf0-c6d2a5d66356-client-ca\") pod \"route-controller-manager-6576b87f9c-fjkdx\" (UID: \"8b019f11-935f-4956-aaf0-c6d2a5d66356\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fjkdx" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.816550 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/41739ae1-9772-4d87-8efd-6eeec54dadab-machine-approver-tls\") pod \"machine-approver-56656f9798-wwvbm\" (UID: \"41739ae1-9772-4d87-8efd-6eeec54dadab\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wwvbm" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.816570 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/794efbdc-c6bb-45a1-ab12-65a4ec7dea6c-config\") pod \"authentication-operator-69f744f599-szz8b\" (UID: \"794efbdc-c6bb-45a1-ab12-65a4ec7dea6c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-szz8b" 
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.816588 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/794efbdc-c6bb-45a1-ab12-65a4ec7dea6c-serving-cert\") pod \"authentication-operator-69f744f599-szz8b\" (UID: \"794efbdc-c6bb-45a1-ab12-65a4ec7dea6c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-szz8b"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.816627 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29fa6885-6526-4cdc-aace-fe3194157043-config\") pod \"etcd-operator-b45778765-wn9mc\" (UID: \"29fa6885-6526-4cdc-aace-fe3194157043\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wn9mc"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.816649 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/29fa6885-6526-4cdc-aace-fe3194157043-etcd-ca\") pod \"etcd-operator-b45778765-wn9mc\" (UID: \"29fa6885-6526-4cdc-aace-fe3194157043\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wn9mc"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.816673 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tsj56\" (UniqueName: \"kubernetes.io/projected/fb8e1e39-1ba7-4d9a-b6ad-e1ba38d6abf1-kube-api-access-tsj56\") pod \"machine-api-operator-5694c8668f-nnst7\" (UID: \"fb8e1e39-1ba7-4d9a-b6ad-e1ba38d6abf1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nnst7"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.816711 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f14e7b6b-9dda-45d2-a3d2-097c3432f736-trusted-ca\") pod \"console-operator-58897d9998-wzsvf\" (UID: \"f14e7b6b-9dda-45d2-a3d2-097c3432f736\") " pod="openshift-console-operator/console-operator-58897d9998-wzsvf"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.816727 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.816731 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/59c54684-8c51-4e7b-975e-2511cf858db6-config\") pod \"openshift-apiserver-operator-796bbdcf4f-wnkgz\" (UID: \"59c54684-8c51-4e7b-975e-2511cf858db6\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wnkgz"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.816802 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zhfks\" (UniqueName: \"kubernetes.io/projected/139247ce-bdc4-46c8-8acb-0120504e8855-kube-api-access-zhfks\") pod \"apiserver-7bbb656c7d-j7k6j\" (UID: \"139247ce-bdc4-46c8-8acb-0120504e8855\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j7k6j"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.816863 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-hgblz\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " pod="openshift-authentication/oauth-openshift-558db77b4-hgblz"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.816891 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/139247ce-bdc4-46c8-8acb-0120504e8855-etcd-client\") pod \"apiserver-7bbb656c7d-j7k6j\" (UID: \"139247ce-bdc4-46c8-8acb-0120504e8855\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j7k6j"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.816913 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/29fa6885-6526-4cdc-aace-fe3194157043-etcd-service-ca\") pod \"etcd-operator-b45778765-wn9mc\" (UID: \"29fa6885-6526-4cdc-aace-fe3194157043\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wn9mc"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.816959 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41739ae1-9772-4d87-8efd-6eeec54dadab-config\") pod \"machine-approver-56656f9798-wwvbm\" (UID: \"41739ae1-9772-4d87-8efd-6eeec54dadab\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wwvbm"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.816979 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-hgblz\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " pod="openshift-authentication/oauth-openshift-558db77b4-hgblz"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.817042 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8wtgr\" (UniqueName: \"kubernetes.io/projected/29fa6885-6526-4cdc-aace-fe3194157043-kube-api-access-8wtgr\") pod \"etcd-operator-b45778765-wn9mc\" (UID: \"29fa6885-6526-4cdc-aace-fe3194157043\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wn9mc"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.817065 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k4hlp\" (UniqueName: \"kubernetes.io/projected/08f1469c-c4e8-4dab-a21e-730dc60f8ff2-kube-api-access-k4hlp\") pod \"cluster-samples-operator-665b6dd947-wmll4\" (UID: \"08f1469c-c4e8-4dab-a21e-730dc60f8ff2\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-wmll4"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.817111 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/881670bf-7c7d-41c1-aedc-b6d9ba17b368-images\") pod \"machine-config-operator-74547568cd-jtl55\" (UID: \"881670bf-7c7d-41c1-aedc-b6d9ba17b368\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-jtl55"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.817155 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/881670bf-7c7d-41c1-aedc-b6d9ba17b368-auth-proxy-config\") pod \"machine-config-operator-74547568cd-jtl55\" (UID: \"881670bf-7c7d-41c1-aedc-b6d9ba17b368\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-jtl55"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.817186 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f14e7b6b-9dda-45d2-a3d2-097c3432f736-serving-cert\") pod \"console-operator-58897d9998-wzsvf\" (UID: \"f14e7b6b-9dda-45d2-a3d2-097c3432f736\") " pod="openshift-console-operator/console-operator-58897d9998-wzsvf"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.817212 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-hgblz\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " pod="openshift-authentication/oauth-openshift-558db77b4-hgblz"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.817249 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/29fa6885-6526-4cdc-aace-fe3194157043-serving-cert\") pod \"etcd-operator-b45778765-wn9mc\" (UID: \"29fa6885-6526-4cdc-aace-fe3194157043\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wn9mc"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.817284 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/62fcc2b1-4061-42e1-a3f9-15fd336c5e38-audit\") pod \"apiserver-76f77b778f-wh7rt\" (UID: \"62fcc2b1-4061-42e1-a3f9-15fd336c5e38\") " pod="openshift-apiserver/apiserver-76f77b778f-wh7rt"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.817307 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dhd5x\" (UniqueName: \"kubernetes.io/projected/59c54684-8c51-4e7b-975e-2511cf858db6-kube-api-access-dhd5x\") pod \"openshift-apiserver-operator-796bbdcf4f-wnkgz\" (UID: \"59c54684-8c51-4e7b-975e-2511cf858db6\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wnkgz"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.817327 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-hgblz\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " pod="openshift-authentication/oauth-openshift-558db77b4-hgblz"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.817354 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v5ccs\" (UniqueName: \"kubernetes.io/projected/881670bf-7c7d-41c1-aedc-b6d9ba17b368-kube-api-access-v5ccs\") pod \"machine-config-operator-74547568cd-jtl55\" (UID: \"881670bf-7c7d-41c1-aedc-b6d9ba17b368\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-jtl55"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.817376 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/62fcc2b1-4061-42e1-a3f9-15fd336c5e38-image-import-ca\") pod \"apiserver-76f77b778f-wh7rt\" (UID: \"62fcc2b1-4061-42e1-a3f9-15fd336c5e38\") " pod="openshift-apiserver/apiserver-76f77b778f-wh7rt"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.817400 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/62fcc2b1-4061-42e1-a3f9-15fd336c5e38-audit-dir\") pod \"apiserver-76f77b778f-wh7rt\" (UID: \"62fcc2b1-4061-42e1-a3f9-15fd336c5e38\") " pod="openshift-apiserver/apiserver-76f77b778f-wh7rt"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.817424 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/59c54684-8c51-4e7b-975e-2511cf858db6-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-wnkgz\" (UID: \"59c54684-8c51-4e7b-975e-2511cf858db6\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wnkgz"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.817472 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/62fcc2b1-4061-42e1-a3f9-15fd336c5e38-etcd-serving-ca\") pod \"apiserver-76f77b778f-wh7rt\" (UID: \"62fcc2b1-4061-42e1-a3f9-15fd336c5e38\") " pod="openshift-apiserver/apiserver-76f77b778f-wh7rt"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.817493 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/139247ce-bdc4-46c8-8acb-0120504e8855-serving-cert\") pod \"apiserver-7bbb656c7d-j7k6j\" (UID: \"139247ce-bdc4-46c8-8acb-0120504e8855\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j7k6j"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.817557 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/139247ce-bdc4-46c8-8acb-0120504e8855-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-j7k6j\" (UID: \"139247ce-bdc4-46c8-8acb-0120504e8855\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j7k6j"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.817607 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62fcc2b1-4061-42e1-a3f9-15fd336c5e38-config\") pod \"apiserver-76f77b778f-wh7rt\" (UID: \"62fcc2b1-4061-42e1-a3f9-15fd336c5e38\") " pod="openshift-apiserver/apiserver-76f77b778f-wh7rt"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.817706 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/62fcc2b1-4061-42e1-a3f9-15fd336c5e38-trusted-ca-bundle\") pod \"apiserver-76f77b778f-wh7rt\" (UID: \"62fcc2b1-4061-42e1-a3f9-15fd336c5e38\") " pod="openshift-apiserver/apiserver-76f77b778f-wh7rt"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.817753 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-hgblz\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " pod="openshift-authentication/oauth-openshift-558db77b4-hgblz"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.817822 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/139247ce-bdc4-46c8-8acb-0120504e8855-encryption-config\") pod \"apiserver-7bbb656c7d-j7k6j\" (UID: \"139247ce-bdc4-46c8-8acb-0120504e8855\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j7k6j"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.817855 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f14e7b6b-9dda-45d2-a3d2-097c3432f736-config\") pod \"console-operator-58897d9998-wzsvf\" (UID: \"f14e7b6b-9dda-45d2-a3d2-097c3432f736\") " pod="openshift-console-operator/console-operator-58897d9998-wzsvf"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.817884 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/08f1469c-c4e8-4dab-a21e-730dc60f8ff2-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-wmll4\" (UID: \"08f1469c-c4e8-4dab-a21e-730dc60f8ff2\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-wmll4"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.817927 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vxlkf\" (UniqueName: \"kubernetes.io/projected/ce83fd31-f9fe-46f9-b865-cdfcfcf8e6be-kube-api-access-vxlkf\") pod \"catalog-operator-68c6474976-hcshz\" (UID: \"ce83fd31-f9fe-46f9-b865-cdfcfcf8e6be\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-hcshz"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.817946 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/04246b55-2809-4c64-abaf-9bed254d0e80-audit-policies\") pod \"oauth-openshift-558db77b4-hgblz\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " pod="openshift-authentication/oauth-openshift-558db77b4-hgblz"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.817965 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8b019f11-935f-4956-aaf0-c6d2a5d66356-serving-cert\") pod \"route-controller-manager-6576b87f9c-fjkdx\" (UID: \"8b019f11-935f-4956-aaf0-c6d2a5d66356\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fjkdx"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.818004 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/139247ce-bdc4-46c8-8acb-0120504e8855-audit-dir\") pod \"apiserver-7bbb656c7d-j7k6j\" (UID: \"139247ce-bdc4-46c8-8acb-0120504e8855\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j7k6j"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.818070 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-hgblz\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " pod="openshift-authentication/oauth-openshift-558db77b4-hgblz"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.818093 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-hgblz\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " pod="openshift-authentication/oauth-openshift-558db77b4-hgblz"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.818117 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqhdw\" (UniqueName: \"kubernetes.io/projected/8b019f11-935f-4956-aaf0-c6d2a5d66356-kube-api-access-fqhdw\") pod \"route-controller-manager-6576b87f9c-fjkdx\" (UID: \"8b019f11-935f-4956-aaf0-c6d2a5d66356\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fjkdx"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.818144 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/fb8e1e39-1ba7-4d9a-b6ad-e1ba38d6abf1-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-nnst7\" (UID: \"fb8e1e39-1ba7-4d9a-b6ad-e1ba38d6abf1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nnst7"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.818166 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-hgblz\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " pod="openshift-authentication/oauth-openshift-558db77b4-hgblz"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.818189 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/139247ce-bdc4-46c8-8acb-0120504e8855-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-j7k6j\" (UID: \"139247ce-bdc4-46c8-8acb-0120504e8855\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j7k6j"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.818215 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/62fcc2b1-4061-42e1-a3f9-15fd336c5e38-etcd-client\") pod \"apiserver-76f77b778f-wh7rt\" (UID: \"62fcc2b1-4061-42e1-a3f9-15fd336c5e38\") " pod="openshift-apiserver/apiserver-76f77b778f-wh7rt"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.818234 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/fb8e1e39-1ba7-4d9a-b6ad-e1ba38d6abf1-images\") pod \"machine-api-operator-5694c8668f-nnst7\" (UID: \"fb8e1e39-1ba7-4d9a-b6ad-e1ba38d6abf1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nnst7"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.818254 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vmzhh\" (UniqueName: \"kubernetes.io/projected/f14e7b6b-9dda-45d2-a3d2-097c3432f736-kube-api-access-vmzhh\") pod \"console-operator-58897d9998-wzsvf\" (UID: \"f14e7b6b-9dda-45d2-a3d2-097c3432f736\") " pod="openshift-console-operator/console-operator-58897d9998-wzsvf"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.818283 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/41739ae1-9772-4d87-8efd-6eeec54dadab-auth-proxy-config\") pod \"machine-approver-56656f9798-wwvbm\" (UID: \"41739ae1-9772-4d87-8efd-6eeec54dadab\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wwvbm"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.818313 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/881670bf-7c7d-41c1-aedc-b6d9ba17b368-proxy-tls\") pod \"machine-config-operator-74547568cd-jtl55\" (UID: \"881670bf-7c7d-41c1-aedc-b6d9ba17b368\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-jtl55"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.818334 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/29fa6885-6526-4cdc-aace-fe3194157043-etcd-client\") pod \"etcd-operator-b45778765-wn9mc\" (UID: \"29fa6885-6526-4cdc-aace-fe3194157043\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wn9mc"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.818357 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/ce83fd31-f9fe-46f9-b865-cdfcfcf8e6be-srv-cert\") pod \"catalog-operator-68c6474976-hcshz\" (UID: \"ce83fd31-f9fe-46f9-b865-cdfcfcf8e6be\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-hcshz"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.819267 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.819453 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.819770 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.819790 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.819947 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.820038 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.820073 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.820269 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.820462 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.820848 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wdh2w"]
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.821452 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pkwvd"]
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.821473 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wpgk7"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.821912 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pkwvd"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.821976 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wdh2w"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.825299 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.849930 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.850694 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-khg9t"]
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.852062 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-nzgzn"]
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.856423 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.857771 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-khg9t"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.859973 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-nzgzn"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.871662 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.871691 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-5gd9z"]
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.873663 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-5gd9z"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.874890 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-cds6s"]
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.875892 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-cds6s"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.879165 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-jcpdc"]
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.881885 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-jcpdc"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.886100 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bjntn"]
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.889385 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-w48fc"]
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.889797 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bjntn"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.893366 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-w48fc"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.897400 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-phfft"]
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.898416 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.898580 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-phfft"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.899343 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.902043 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-r4dvq"]
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.903163 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7txmt"]
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.903606 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-r4dvq"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.903651 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7txmt"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.904490 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-d9jsl"]
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.905380 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-d9jsl"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.906030 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-lspv9"]
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.906899 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-lspv9"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.907172 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-zs9kz"]
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.908737 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-zs9kz"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.908897 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-f87xj"]
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.909319 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-f87xj"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.909900 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.910194 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rfct2"]
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.911203 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-vf9nc"]
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.911633 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-vf9nc"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.911850 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rfct2"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.913306 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ggtd2"]
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.913910 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ggtd2"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.914363 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-szz8b"]
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.915663 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-72k72"]
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.916390 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-72k72"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.916965 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411670-9n2pw"]
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.917981 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411670-9n2pw"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.918251 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-wzsvf"]
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.919727 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/fb8e1e39-1ba7-4d9a-b6ad-e1ba38d6abf1-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-nnst7\" (UID: \"fb8e1e39-1ba7-4d9a-b6ad-e1ba38d6abf1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nnst7"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.919777 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-hgblz\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " pod="openshift-authentication/oauth-openshift-558db77b4-hgblz"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.919810 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/139247ce-bdc4-46c8-8acb-0120504e8855-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-j7k6j\" (UID: \"139247ce-bdc4-46c8-8acb-0120504e8855\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j7k6j"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.919838 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/62fcc2b1-4061-42e1-a3f9-15fd336c5e38-etcd-client\") pod \"apiserver-76f77b778f-wh7rt\" (UID: \"62fcc2b1-4061-42e1-a3f9-15fd336c5e38\") " pod="openshift-apiserver/apiserver-76f77b778f-wh7rt"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.919863 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/fb8e1e39-1ba7-4d9a-b6ad-e1ba38d6abf1-images\") pod \"machine-api-operator-5694c8668f-nnst7\" (UID: \"fb8e1e39-1ba7-4d9a-b6ad-e1ba38d6abf1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nnst7"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.919888 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vmzhh\" (UniqueName: \"kubernetes.io/projected/f14e7b6b-9dda-45d2-a3d2-097c3432f736-kube-api-access-vmzhh\") pod \"console-operator-58897d9998-wzsvf\" (UID: \"f14e7b6b-9dda-45d2-a3d2-097c3432f736\") " pod="openshift-console-operator/console-operator-58897d9998-wzsvf"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.919912 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/41739ae1-9772-4d87-8efd-6eeec54dadab-auth-proxy-config\") pod \"machine-approver-56656f9798-wwvbm\" (UID: \"41739ae1-9772-4d87-8efd-6eeec54dadab\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wwvbm"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.919940 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/881670bf-7c7d-41c1-aedc-b6d9ba17b368-proxy-tls\") pod \"machine-config-operator-74547568cd-jtl55\" (UID: \"881670bf-7c7d-41c1-aedc-b6d9ba17b368\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-jtl55"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.919970 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/29fa6885-6526-4cdc-aace-fe3194157043-etcd-client\") pod \"etcd-operator-b45778765-wn9mc\" (UID: \"29fa6885-6526-4cdc-aace-fe3194157043\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wn9mc"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.919993 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/ce83fd31-f9fe-46f9-b865-cdfcfcf8e6be-srv-cert\") pod \"catalog-operator-68c6474976-hcshz\" (UID: \"ce83fd31-f9fe-46f9-b865-cdfcfcf8e6be\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-hcshz"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920015 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/62fcc2b1-4061-42e1-a3f9-15fd336c5e38-node-pullsecrets\") pod \"apiserver-76f77b778f-wh7rt\" (UID: \"62fcc2b1-4061-42e1-a3f9-15fd336c5e38\") " pod="openshift-apiserver/apiserver-76f77b778f-wh7rt"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920040 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/62fcc2b1-4061-42e1-a3f9-15fd336c5e38-serving-cert\") pod \"apiserver-76f77b778f-wh7rt\" (UID: \"62fcc2b1-4061-42e1-a3f9-15fd336c5e38\") " pod="openshift-apiserver/apiserver-76f77b778f-wh7rt"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920063 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/ce83fd31-f9fe-46f9-b865-cdfcfcf8e6be-profile-collector-cert\") pod \"catalog-operator-68c6474976-hcshz\" (UID: \"ce83fd31-f9fe-46f9-b865-cdfcfcf8e6be\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-hcshz"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920088 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cg555\" (UniqueName: \"kubernetes.io/projected/04246b55-2809-4c64-abaf-9bed254d0e80-kube-api-access-cg555\") pod \"oauth-openshift-558db77b4-hgblz\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " pod="openshift-authentication/oauth-openshift-558db77b4-hgblz"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920126 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-thhn7\" (UniqueName: \"kubernetes.io/projected/794efbdc-c6bb-45a1-ab12-65a4ec7dea6c-kube-api-access-thhn7\") pod \"authentication-operator-69f744f599-szz8b\" (UID: \"794efbdc-c6bb-45a1-ab12-65a4ec7dea6c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-szz8b"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920152 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4qvtq\" (UniqueName: \"kubernetes.io/projected/62fcc2b1-4061-42e1-a3f9-15fd336c5e38-kube-api-access-4qvtq\") pod \"apiserver-76f77b778f-wh7rt\" (UID: \"62fcc2b1-4061-42e1-a3f9-15fd336c5e38\") " pod="openshift-apiserver/apiserver-76f77b778f-wh7rt"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920177 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/62fcc2b1-4061-42e1-a3f9-15fd336c5e38-encryption-config\") pod \"apiserver-76f77b778f-wh7rt\" (UID: \"62fcc2b1-4061-42e1-a3f9-15fd336c5e38\") " pod="openshift-apiserver/apiserver-76f77b778f-wh7rt"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920204 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-hgblz\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " pod="openshift-authentication/oauth-openshift-558db77b4-hgblz"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920222 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/139247ce-bdc4-46c8-8acb-0120504e8855-audit-policies\") pod \"apiserver-7bbb656c7d-j7k6j\" (UID: \"139247ce-bdc4-46c8-8acb-0120504e8855\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j7k6j"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920240 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/794efbdc-c6bb-45a1-ab12-65a4ec7dea6c-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-szz8b\" (UID: \"794efbdc-c6bb-45a1-ab12-65a4ec7dea6c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-szz8b"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920257 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fb8e1e39-1ba7-4d9a-b6ad-e1ba38d6abf1-config\") pod \"machine-api-operator-5694c8668f-nnst7\" (UID: \"fb8e1e39-1ba7-4d9a-b6ad-e1ba38d6abf1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nnst7"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920298 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b019f11-935f-4956-aaf0-c6d2a5d66356-config\") pod \"route-controller-manager-6576b87f9c-fjkdx\" (UID: \"8b019f11-935f-4956-aaf0-c6d2a5d66356\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fjkdx"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920314 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/04246b55-2809-4c64-abaf-9bed254d0e80-audit-dir\") pod \"oauth-openshift-558db77b4-hgblz\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " pod="openshift-authentication/oauth-openshift-558db77b4-hgblz"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920330 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-hgblz\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " pod="openshift-authentication/oauth-openshift-558db77b4-hgblz"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920348 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/794efbdc-c6bb-45a1-ab12-65a4ec7dea6c-service-ca-bundle\") pod \"authentication-operator-69f744f599-szz8b\" (UID: \"794efbdc-c6bb-45a1-ab12-65a4ec7dea6c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-szz8b"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920366 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s424v\" (UniqueName: \"kubernetes.io/projected/41739ae1-9772-4d87-8efd-6eeec54dadab-kube-api-access-s424v\") pod \"machine-approver-56656f9798-wwvbm\" (UID: \"41739ae1-9772-4d87-8efd-6eeec54dadab\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wwvbm"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920399 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-hgblz\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " pod="openshift-authentication/oauth-openshift-558db77b4-hgblz"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920414 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8b019f11-935f-4956-aaf0-c6d2a5d66356-client-ca\") pod \"route-controller-manager-6576b87f9c-fjkdx\" (UID: \"8b019f11-935f-4956-aaf0-c6d2a5d66356\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fjkdx"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920430 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/41739ae1-9772-4d87-8efd-6eeec54dadab-machine-approver-tls\") pod \"machine-approver-56656f9798-wwvbm\" (UID: \"41739ae1-9772-4d87-8efd-6eeec54dadab\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wwvbm"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920449 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/794efbdc-c6bb-45a1-ab12-65a4ec7dea6c-config\") pod \"authentication-operator-69f744f599-szz8b\" (UID: \"794efbdc-c6bb-45a1-ab12-65a4ec7dea6c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-szz8b"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920472 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/794efbdc-c6bb-45a1-ab12-65a4ec7dea6c-serving-cert\") pod \"authentication-operator-69f744f599-szz8b\" (UID: \"794efbdc-c6bb-45a1-ab12-65a4ec7dea6c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-szz8b"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920498 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29fa6885-6526-4cdc-aace-fe3194157043-config\") pod \"etcd-operator-b45778765-wn9mc\" (UID: \"29fa6885-6526-4cdc-aace-fe3194157043\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wn9mc"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920538 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/29fa6885-6526-4cdc-aace-fe3194157043-etcd-ca\") pod \"etcd-operator-b45778765-wn9mc\" (UID: \"29fa6885-6526-4cdc-aace-fe3194157043\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wn9mc"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920557 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tsj56\" (UniqueName: \"kubernetes.io/projected/fb8e1e39-1ba7-4d9a-b6ad-e1ba38d6abf1-kube-api-access-tsj56\") pod \"machine-api-operator-5694c8668f-nnst7\" (UID: \"fb8e1e39-1ba7-4d9a-b6ad-e1ba38d6abf1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nnst7"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920573 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f14e7b6b-9dda-45d2-a3d2-097c3432f736-trusted-ca\") pod \"console-operator-58897d9998-wzsvf\" (UID: \"f14e7b6b-9dda-45d2-a3d2-097c3432f736\") " pod="openshift-console-operator/console-operator-58897d9998-wzsvf"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920590 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/59c54684-8c51-4e7b-975e-2511cf858db6-config\") pod \"openshift-apiserver-operator-796bbdcf4f-wnkgz\" (UID: \"59c54684-8c51-4e7b-975e-2511cf858db6\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wnkgz"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920607 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zhfks\" (UniqueName: \"kubernetes.io/projected/139247ce-bdc4-46c8-8acb-0120504e8855-kube-api-access-zhfks\") pod \"apiserver-7bbb656c7d-j7k6j\" (UID: \"139247ce-bdc4-46c8-8acb-0120504e8855\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j7k6j"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920636 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-hgblz\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " pod="openshift-authentication/oauth-openshift-558db77b4-hgblz"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920652 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/139247ce-bdc4-46c8-8acb-0120504e8855-etcd-client\") pod \"apiserver-7bbb656c7d-j7k6j\" (UID: \"139247ce-bdc4-46c8-8acb-0120504e8855\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j7k6j"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920683 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/29fa6885-6526-4cdc-aace-fe3194157043-etcd-service-ca\") pod \"etcd-operator-b45778765-wn9mc\" (UID: \"29fa6885-6526-4cdc-aace-fe3194157043\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wn9mc"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920701 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41739ae1-9772-4d87-8efd-6eeec54dadab-config\") pod \"machine-approver-56656f9798-wwvbm\" (UID: \"41739ae1-9772-4d87-8efd-6eeec54dadab\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wwvbm"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920718 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-hgblz\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " pod="openshift-authentication/oauth-openshift-558db77b4-hgblz"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920737 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8wtgr\" (UniqueName: \"kubernetes.io/projected/29fa6885-6526-4cdc-aace-fe3194157043-kube-api-access-8wtgr\") pod \"etcd-operator-b45778765-wn9mc\" (UID: \"29fa6885-6526-4cdc-aace-fe3194157043\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wn9mc"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920754 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k4hlp\" (UniqueName: \"kubernetes.io/projected/08f1469c-c4e8-4dab-a21e-730dc60f8ff2-kube-api-access-k4hlp\") pod \"cluster-samples-operator-665b6dd947-wmll4\" (UID: \"08f1469c-c4e8-4dab-a21e-730dc60f8ff2\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-wmll4"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920772 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/881670bf-7c7d-41c1-aedc-b6d9ba17b368-images\") pod \"machine-config-operator-74547568cd-jtl55\" (UID: \"881670bf-7c7d-41c1-aedc-b6d9ba17b368\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-jtl55"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920791 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/881670bf-7c7d-41c1-aedc-b6d9ba17b368-auth-proxy-config\") pod \"machine-config-operator-74547568cd-jtl55\" (UID: \"881670bf-7c7d-41c1-aedc-b6d9ba17b368\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-jtl55"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920806 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f14e7b6b-9dda-45d2-a3d2-097c3432f736-serving-cert\") pod \"console-operator-58897d9998-wzsvf\" (UID: \"f14e7b6b-9dda-45d2-a3d2-097c3432f736\") " pod="openshift-console-operator/console-operator-58897d9998-wzsvf"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920823 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-hgblz\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " pod="openshift-authentication/oauth-openshift-558db77b4-hgblz"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920840 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/29fa6885-6526-4cdc-aace-fe3194157043-serving-cert\") pod \"etcd-operator-b45778765-wn9mc\" (UID: \"29fa6885-6526-4cdc-aace-fe3194157043\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wn9mc"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920857 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/62fcc2b1-4061-42e1-a3f9-15fd336c5e38-audit\") pod \"apiserver-76f77b778f-wh7rt\" (UID: \"62fcc2b1-4061-42e1-a3f9-15fd336c5e38\") " pod="openshift-apiserver/apiserver-76f77b778f-wh7rt"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920880 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dhd5x\" (UniqueName: \"kubernetes.io/projected/59c54684-8c51-4e7b-975e-2511cf858db6-kube-api-access-dhd5x\") pod \"openshift-apiserver-operator-796bbdcf4f-wnkgz\" (UID: \"59c54684-8c51-4e7b-975e-2511cf858db6\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wnkgz"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920899 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-hgblz\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " pod="openshift-authentication/oauth-openshift-558db77b4-hgblz"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920937 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v5ccs\" (UniqueName: \"kubernetes.io/projected/881670bf-7c7d-41c1-aedc-b6d9ba17b368-kube-api-access-v5ccs\") pod \"machine-config-operator-74547568cd-jtl55\" (UID: \"881670bf-7c7d-41c1-aedc-b6d9ba17b368\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-jtl55"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920958 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/62fcc2b1-4061-42e1-a3f9-15fd336c5e38-image-import-ca\") pod \"apiserver-76f77b778f-wh7rt\" (UID: \"62fcc2b1-4061-42e1-a3f9-15fd336c5e38\") " pod="openshift-apiserver/apiserver-76f77b778f-wh7rt"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920977 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/62fcc2b1-4061-42e1-a3f9-15fd336c5e38-audit-dir\") pod \"apiserver-76f77b778f-wh7rt\" (UID: \"62fcc2b1-4061-42e1-a3f9-15fd336c5e38\") " pod="openshift-apiserver/apiserver-76f77b778f-wh7rt"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.920994 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/59c54684-8c51-4e7b-975e-2511cf858db6-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-wnkgz\" (UID: \"59c54684-8c51-4e7b-975e-2511cf858db6\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wnkgz"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.921021 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/62fcc2b1-4061-42e1-a3f9-15fd336c5e38-etcd-serving-ca\") pod \"apiserver-76f77b778f-wh7rt\" (UID: \"62fcc2b1-4061-42e1-a3f9-15fd336c5e38\") " pod="openshift-apiserver/apiserver-76f77b778f-wh7rt"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.921036 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/139247ce-bdc4-46c8-8acb-0120504e8855-serving-cert\") pod \"apiserver-7bbb656c7d-j7k6j\" (UID: \"139247ce-bdc4-46c8-8acb-0120504e8855\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j7k6j"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.921053 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/139247ce-bdc4-46c8-8acb-0120504e8855-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-j7k6j\" (UID: \"139247ce-bdc4-46c8-8acb-0120504e8855\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j7k6j"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.921084 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62fcc2b1-4061-42e1-a3f9-15fd336c5e38-config\") pod \"apiserver-76f77b778f-wh7rt\" (UID: \"62fcc2b1-4061-42e1-a3f9-15fd336c5e38\") " pod="openshift-apiserver/apiserver-76f77b778f-wh7rt"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.921104 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/62fcc2b1-4061-42e1-a3f9-15fd336c5e38-trusted-ca-bundle\") pod \"apiserver-76f77b778f-wh7rt\" (UID: \"62fcc2b1-4061-42e1-a3f9-15fd336c5e38\") " pod="openshift-apiserver/apiserver-76f77b778f-wh7rt"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.921125 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-hgblz\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " pod="openshift-authentication/oauth-openshift-558db77b4-hgblz"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.921145 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/139247ce-bdc4-46c8-8acb-0120504e8855-encryption-config\") pod \"apiserver-7bbb656c7d-j7k6j\" (UID: \"139247ce-bdc4-46c8-8acb-0120504e8855\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j7k6j"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.921163 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f14e7b6b-9dda-45d2-a3d2-097c3432f736-config\") pod \"console-operator-58897d9998-wzsvf\" (UID: \"f14e7b6b-9dda-45d2-a3d2-097c3432f736\") " pod="openshift-console-operator/console-operator-58897d9998-wzsvf"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.921182 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/08f1469c-c4e8-4dab-a21e-730dc60f8ff2-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-wmll4\" (UID: \"08f1469c-c4e8-4dab-a21e-730dc60f8ff2\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-wmll4"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.921207 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vxlkf\" (UniqueName: \"kubernetes.io/projected/ce83fd31-f9fe-46f9-b865-cdfcfcf8e6be-kube-api-access-vxlkf\") pod \"catalog-operator-68c6474976-hcshz\" (UID: \"ce83fd31-f9fe-46f9-b865-cdfcfcf8e6be\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-hcshz"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.921225 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/04246b55-2809-4c64-abaf-9bed254d0e80-audit-policies\") pod \"oauth-openshift-558db77b4-hgblz\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " pod="openshift-authentication/oauth-openshift-558db77b4-hgblz"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.921242 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8b019f11-935f-4956-aaf0-c6d2a5d66356-serving-cert\") pod \"route-controller-manager-6576b87f9c-fjkdx\" (UID: \"8b019f11-935f-4956-aaf0-c6d2a5d66356\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fjkdx"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.921265 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/139247ce-bdc4-46c8-8acb-0120504e8855-audit-dir\") pod \"apiserver-7bbb656c7d-j7k6j\" (UID: \"139247ce-bdc4-46c8-8acb-0120504e8855\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j7k6j"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.921282 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-hgblz\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " pod="openshift-authentication/oauth-openshift-558db77b4-hgblz"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.921300 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-hgblz\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " pod="openshift-authentication/oauth-openshift-558db77b4-hgblz"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.921318 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqhdw\" (UniqueName: \"kubernetes.io/projected/8b019f11-935f-4956-aaf0-c6d2a5d66356-kube-api-access-fqhdw\") pod \"route-controller-manager-6576b87f9c-fjkdx\" (UID: \"8b019f11-935f-4956-aaf0-c6d2a5d66356\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fjkdx"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.922105 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/59c54684-8c51-4e7b-975e-2511cf858db6-config\") pod \"openshift-apiserver-operator-796bbdcf4f-wnkgz\" (UID: \"59c54684-8c51-4e7b-975e-2511cf858db6\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wnkgz"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.922627 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/62fcc2b1-4061-42e1-a3f9-15fd336c5e38-audit-dir\") pod \"apiserver-76f77b778f-wh7rt\" (UID: \"62fcc2b1-4061-42e1-a3f9-15fd336c5e38\") " pod="openshift-apiserver/apiserver-76f77b778f-wh7rt"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.926067 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-hgblz\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " pod="openshift-authentication/oauth-openshift-558db77b4-hgblz"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.926762 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f14e7b6b-9dda-45d2-a3d2-097c3432f736-config\") pod \"console-operator-58897d9998-wzsvf\" (UID: \"f14e7b6b-9dda-45d2-a3d2-097c3432f736\") " pod="openshift-console-operator/console-operator-58897d9998-wzsvf"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.928100 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/fb8e1e39-1ba7-4d9a-b6ad-e1ba38d6abf1-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-nnst7\" (UID: \"fb8e1e39-1ba7-4d9a-b6ad-e1ba38d6abf1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nnst7"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.928881 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8b019f11-935f-4956-aaf0-c6d2a5d66356-client-ca\") pod \"route-controller-manager-6576b87f9c-fjkdx\" (UID: \"8b019f11-935f-4956-aaf0-c6d2a5d66356\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fjkdx"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.930184 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/29fa6885-6526-4cdc-aace-fe3194157043-etcd-client\") pod \"etcd-operator-b45778765-wn9mc\" (UID: \"29fa6885-6526-4cdc-aace-fe3194157043\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wn9mc"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.930598 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/fb8e1e39-1ba7-4d9a-b6ad-e1ba38d6abf1-images\") pod \"machine-api-operator-5694c8668f-nnst7\" (UID: \"fb8e1e39-1ba7-4d9a-b6ad-e1ba38d6abf1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nnst7"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.932137 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pkwvd"]
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.935941 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/29fa6885-6526-4cdc-aace-fe3194157043-etcd-service-ca\") pod \"etcd-operator-b45778765-wn9mc\" (UID: \"29fa6885-6526-4cdc-aace-fe3194157043\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wn9mc"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.936005 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/62fcc2b1-4061-42e1-a3f9-15fd336c5e38-etcd-serving-ca\") pod \"apiserver-76f77b778f-wh7rt\" (UID: \"62fcc2b1-4061-42e1-a3f9-15fd336c5e38\") " pod="openshift-apiserver/apiserver-76f77b778f-wh7rt"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.936544 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/794efbdc-c6bb-45a1-ab12-65a4ec7dea6c-config\") pod \"authentication-operator-69f744f599-szz8b\" (UID: \"794efbdc-c6bb-45a1-ab12-65a4ec7dea6c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-szz8b"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.936743 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/139247ce-bdc4-46c8-8acb-0120504e8855-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-j7k6j\" (UID: \"139247ce-bdc4-46c8-8acb-0120504e8855\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j7k6j"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.936825 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/794efbdc-c6bb-45a1-ab12-65a4ec7dea6c-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-szz8b\" (UID: \"794efbdc-c6bb-45a1-ab12-65a4ec7dea6c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-szz8b"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.937496 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41739ae1-9772-4d87-8efd-6eeec54dadab-config\") pod \"machine-approver-56656f9798-wwvbm\" (UID: \"41739ae1-9772-4d87-8efd-6eeec54dadab\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wwvbm"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.937761 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fb8e1e39-1ba7-4d9a-b6ad-e1ba38d6abf1-config\") pod \"machine-api-operator-5694c8668f-nnst7\" (UID: \"fb8e1e39-1ba7-4d9a-b6ad-e1ba38d6abf1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nnst7"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.937815 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-hgblz\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " pod="openshift-authentication/oauth-openshift-558db77b4-hgblz"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.937875 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/29fa6885-6526-4cdc-aace-fe3194157043-etcd-ca\") pod \"etcd-operator-b45778765-wn9mc\" (UID: \"29fa6885-6526-4cdc-aace-fe3194157043\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wn9mc"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.939098 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f14e7b6b-9dda-45d2-a3d2-097c3432f736-trusted-ca\") pod \"console-operator-58897d9998-wzsvf\" (UID: \"f14e7b6b-9dda-45d2-a3d2-097c3432f736\") " pod="openshift-console-operator/console-operator-58897d9998-wzsvf"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.939105 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/62fcc2b1-4061-42e1-a3f9-15fd336c5e38-node-pullsecrets\") pod \"apiserver-76f77b778f-wh7rt\" (UID: \"62fcc2b1-4061-42e1-a3f9-15fd336c5e38\") " pod="openshift-apiserver/apiserver-76f77b778f-wh7rt"
Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.939747
4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/ce83fd31-f9fe-46f9-b865-cdfcfcf8e6be-srv-cert\") pod \"catalog-operator-68c6474976-hcshz\" (UID: \"ce83fd31-f9fe-46f9-b865-cdfcfcf8e6be\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-hcshz" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.940114 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/62fcc2b1-4061-42e1-a3f9-15fd336c5e38-audit\") pod \"apiserver-76f77b778f-wh7rt\" (UID: \"62fcc2b1-4061-42e1-a3f9-15fd336c5e38\") " pod="openshift-apiserver/apiserver-76f77b778f-wh7rt" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.940143 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/139247ce-bdc4-46c8-8acb-0120504e8855-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-j7k6j\" (UID: \"139247ce-bdc4-46c8-8acb-0120504e8855\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j7k6j" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.940122 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/04246b55-2809-4c64-abaf-9bed254d0e80-audit-policies\") pod \"oauth-openshift-558db77b4-hgblz\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " pod="openshift-authentication/oauth-openshift-558db77b4-hgblz" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.940174 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/139247ce-bdc4-46c8-8acb-0120504e8855-audit-dir\") pod \"apiserver-7bbb656c7d-j7k6j\" (UID: \"139247ce-bdc4-46c8-8acb-0120504e8855\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j7k6j" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.940540 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/41739ae1-9772-4d87-8efd-6eeec54dadab-auth-proxy-config\") pod \"machine-approver-56656f9798-wwvbm\" (UID: \"41739ae1-9772-4d87-8efd-6eeec54dadab\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wwvbm" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.940766 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/04246b55-2809-4c64-abaf-9bed254d0e80-audit-dir\") pod \"oauth-openshift-558db77b4-hgblz\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " pod="openshift-authentication/oauth-openshift-558db77b4-hgblz" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.940811 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/139247ce-bdc4-46c8-8acb-0120504e8855-encryption-config\") pod \"apiserver-7bbb656c7d-j7k6j\" (UID: \"139247ce-bdc4-46c8-8acb-0120504e8855\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j7k6j" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.941325 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/59c54684-8c51-4e7b-975e-2511cf858db6-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-wnkgz\" (UID: \"59c54684-8c51-4e7b-975e-2511cf858db6\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wnkgz" Dec 02 18:38:23 crc 
kubenswrapper[4792]: I1202 18:38:23.941787 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/794efbdc-c6bb-45a1-ab12-65a4ec7dea6c-serving-cert\") pod \"authentication-operator-69f744f599-szz8b\" (UID: \"794efbdc-c6bb-45a1-ab12-65a4ec7dea6c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-szz8b" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.942155 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/794efbdc-c6bb-45a1-ab12-65a4ec7dea6c-service-ca-bundle\") pod \"authentication-operator-69f744f599-szz8b\" (UID: \"794efbdc-c6bb-45a1-ab12-65a4ec7dea6c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-szz8b" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.942208 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-hgblz\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " pod="openshift-authentication/oauth-openshift-558db77b4-hgblz" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.942187 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/881670bf-7c7d-41c1-aedc-b6d9ba17b368-proxy-tls\") pod \"machine-config-operator-74547568cd-jtl55\" (UID: \"881670bf-7c7d-41c1-aedc-b6d9ba17b368\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-jtl55" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.942924 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wnkgz"] Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.942945 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/62fcc2b1-4061-42e1-a3f9-15fd336c5e38-trusted-ca-bundle\") pod \"apiserver-76f77b778f-wh7rt\" (UID: \"62fcc2b1-4061-42e1-a3f9-15fd336c5e38\") " pod="openshift-apiserver/apiserver-76f77b778f-wh7rt" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.940123 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29fa6885-6526-4cdc-aace-fe3194157043-config\") pod \"etcd-operator-b45778765-wn9mc\" (UID: \"29fa6885-6526-4cdc-aace-fe3194157043\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wn9mc" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.943403 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-hgblz\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " pod="openshift-authentication/oauth-openshift-558db77b4-hgblz" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.943418 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62fcc2b1-4061-42e1-a3f9-15fd336c5e38-config\") pod \"apiserver-76f77b778f-wh7rt\" (UID: \"62fcc2b1-4061-42e1-a3f9-15fd336c5e38\") " pod="openshift-apiserver/apiserver-76f77b778f-wh7rt" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.943474 4792 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/881670bf-7c7d-41c1-aedc-b6d9ba17b368-images\") pod \"machine-config-operator-74547568cd-jtl55\" (UID: \"881670bf-7c7d-41c1-aedc-b6d9ba17b368\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-jtl55" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.943580 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.943709 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b019f11-935f-4956-aaf0-c6d2a5d66356-config\") pod \"route-controller-manager-6576b87f9c-fjkdx\" (UID: \"8b019f11-935f-4956-aaf0-c6d2a5d66356\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fjkdx" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.944322 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/62fcc2b1-4061-42e1-a3f9-15fd336c5e38-image-import-ca\") pod \"apiserver-76f77b778f-wh7rt\" (UID: \"62fcc2b1-4061-42e1-a3f9-15fd336c5e38\") " pod="openshift-apiserver/apiserver-76f77b778f-wh7rt" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.944514 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-jcpdc"] Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.944578 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-hgblz\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " pod="openshift-authentication/oauth-openshift-558db77b4-hgblz" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.948499 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-z9h9q"] Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.949272 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-nr8ml"] Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.949295 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-hgblz"] Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.949386 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-z9h9q" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.949603 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-hgblz\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " pod="openshift-authentication/oauth-openshift-558db77b4-hgblz" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.949921 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/139247ce-bdc4-46c8-8acb-0120504e8855-audit-policies\") pod \"apiserver-7bbb656c7d-j7k6j\" (UID: \"139247ce-bdc4-46c8-8acb-0120504e8855\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j7k6j" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.949957 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/139247ce-bdc4-46c8-8acb-0120504e8855-etcd-client\") pod \"apiserver-7bbb656c7d-j7k6j\" (UID: \"139247ce-bdc4-46c8-8acb-0120504e8855\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j7k6j" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.950327 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/29fa6885-6526-4cdc-aace-fe3194157043-serving-cert\") pod \"etcd-operator-b45778765-wn9mc\" (UID: \"29fa6885-6526-4cdc-aace-fe3194157043\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wn9mc" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.950331 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-hgblz\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " pod="openshift-authentication/oauth-openshift-558db77b4-hgblz" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.950500 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.950541 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/62fcc2b1-4061-42e1-a3f9-15fd336c5e38-encryption-config\") pod \"apiserver-76f77b778f-wh7rt\" (UID: \"62fcc2b1-4061-42e1-a3f9-15fd336c5e38\") " pod="openshift-apiserver/apiserver-76f77b778f-wh7rt" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.951016 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-hgblz\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " pod="openshift-authentication/oauth-openshift-558db77b4-hgblz" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.951227 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-wh7rt"] Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.951318 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/08f1469c-c4e8-4dab-a21e-730dc60f8ff2-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-wmll4\" (UID: \"08f1469c-c4e8-4dab-a21e-730dc60f8ff2\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-wmll4" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.951957 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8b019f11-935f-4956-aaf0-c6d2a5d66356-serving-cert\") pod \"route-controller-manager-6576b87f9c-fjkdx\" (UID: \"8b019f11-935f-4956-aaf0-c6d2a5d66356\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fjkdx" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.952283 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f14e7b6b-9dda-45d2-a3d2-097c3432f736-serving-cert\") pod \"console-operator-58897d9998-wzsvf\" (UID: \"f14e7b6b-9dda-45d2-a3d2-097c3432f736\") " pod="openshift-console-operator/console-operator-58897d9998-wzsvf" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.952627 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/62fcc2b1-4061-42e1-a3f9-15fd336c5e38-serving-cert\") pod \"apiserver-76f77b778f-wh7rt\" (UID: \"62fcc2b1-4061-42e1-a3f9-15fd336c5e38\") " pod="openshift-apiserver/apiserver-76f77b778f-wh7rt" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.952893 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wpgk7"] Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.953554 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/881670bf-7c7d-41c1-aedc-b6d9ba17b368-auth-proxy-config\") pod \"machine-config-operator-74547568cd-jtl55\" (UID: \"881670bf-7c7d-41c1-aedc-b6d9ba17b368\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-jtl55" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.954418 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/62fcc2b1-4061-42e1-a3f9-15fd336c5e38-etcd-client\") pod \"apiserver-76f77b778f-wh7rt\" (UID: \"62fcc2b1-4061-42e1-a3f9-15fd336c5e38\") " pod="openshift-apiserver/apiserver-76f77b778f-wh7rt" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.954480 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-nzgzn"] Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.955937 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/41739ae1-9772-4d87-8efd-6eeec54dadab-machine-approver-tls\") pod \"machine-approver-56656f9798-wwvbm\" (UID: \"41739ae1-9772-4d87-8efd-6eeec54dadab\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wwvbm" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.956550 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-wmll4"] Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.957854 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-nnst7"] Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.959100 4792 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/ce83fd31-f9fe-46f9-b865-cdfcfcf8e6be-profile-collector-cert\") pod \"catalog-operator-68c6474976-hcshz\" (UID: \"ce83fd31-f9fe-46f9-b865-cdfcfcf8e6be\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-hcshz" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.959500 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-hgblz\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " pod="openshift-authentication/oauth-openshift-558db77b4-hgblz" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.959614 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-hgblz\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " pod="openshift-authentication/oauth-openshift-558db77b4-hgblz" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.959622 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/139247ce-bdc4-46c8-8acb-0120504e8855-serving-cert\") pod \"apiserver-7bbb656c7d-j7k6j\" (UID: \"139247ce-bdc4-46c8-8acb-0120504e8855\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j7k6j" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.960704 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-r4dvq"] Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.961309 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-hgblz\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " pod="openshift-authentication/oauth-openshift-558db77b4-hgblz" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.962439 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-w48fc"] Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.963740 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-5gd9z"] Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.965273 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-wn9mc"] Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.966445 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-f89mv"] Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.968023 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-khg9t"] Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.969495 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bjntn"] Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.969823 4792 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.970847 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-jtl55"] Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.970994 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wdh2w"] Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.981389 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-vf9nc"] Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.984194 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-hcshz"] Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.986224 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-tk4v8"] Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.987159 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-tk4v8" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.987711 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-fjkdx"] Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.989229 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.990193 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-nplg6"] Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.991890 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-phfft"] Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.995863 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-qjpwp"] Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.996580 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-qjpwp" Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.997433 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-j7k6j"] Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.998632 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411670-9n2pw"] Dec 02 18:38:23 crc kubenswrapper[4792]: I1202 18:38:23.999731 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-lspv9"] Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.000890 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-f87xj"] Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.001976 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rfct2"] Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.003037 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7txmt"] Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.004072 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-z9h9q"] Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.005166 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-cds6s"] Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.006294 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ggtd2"] Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.007408 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-tk4v8"] Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.008609 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-72k72"] Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.009750 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-zs9kz"] Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.010969 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-r6ncj"] Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.012127 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-r6ncj"] Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.012175 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-r6ncj" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.014048 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.029432 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.050068 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.090029 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.110820 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.129489 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.150843 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.170585 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.190848 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.216690 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.236198 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.250890 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.270153 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.310583 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.326145 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/c586142b-c192-4706-9026-bcf666e8f7c6-ca-trust-extracted\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.326267 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c586142b-c192-4706-9026-bcf666e8f7c6-bound-sa-token\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.326396 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5fede595-7ee5-4aaf-999d-45eebf4ca097-serving-cert\") pod \"openshift-config-operator-7777fb866f-nplg6\" (UID: \"5fede595-7ee5-4aaf-999d-45eebf4ca097\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-nplg6" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.326464 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/c586142b-c192-4706-9026-bcf666e8f7c6-registry-certificates\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.326513 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cpwrv\" (UniqueName: \"kubernetes.io/projected/c586142b-c192-4706-9026-bcf666e8f7c6-kube-api-access-cpwrv\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.326658 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2mb46\" (UniqueName: \"kubernetes.io/projected/ff2b39c3-c4a3-4d87-b4bd-474d89c04db7-kube-api-access-2mb46\") pod \"dns-operator-744455d44c-f89mv\" (UID: \"ff2b39c3-c4a3-4d87-b4bd-474d89c04db7\") " pod="openshift-dns-operator/dns-operator-744455d44c-f89mv" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.326840 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c586142b-c192-4706-9026-bcf666e8f7c6-trusted-ca\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.326903 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gd9cd\" (UniqueName: \"kubernetes.io/projected/5fede595-7ee5-4aaf-999d-45eebf4ca097-kube-api-access-gd9cd\") pod \"openshift-config-operator-7777fb866f-nplg6\" (UID: \"5fede595-7ee5-4aaf-999d-45eebf4ca097\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-nplg6" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.327100 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/c586142b-c192-4706-9026-bcf666e8f7c6-registry-tls\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.327170 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/5fede595-7ee5-4aaf-999d-45eebf4ca097-available-featuregates\") pod \"openshift-config-operator-7777fb866f-nplg6\" (UID: \"5fede595-7ee5-4aaf-999d-45eebf4ca097\") " 
pod="openshift-config-operator/openshift-config-operator-7777fb866f-nplg6" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.327317 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/c586142b-c192-4706-9026-bcf666e8f7c6-installation-pull-secrets\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.327383 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ff2b39c3-c4a3-4d87-b4bd-474d89c04db7-metrics-tls\") pod \"dns-operator-744455d44c-f89mv\" (UID: \"ff2b39c3-c4a3-4d87-b4bd-474d89c04db7\") " pod="openshift-dns-operator/dns-operator-744455d44c-f89mv" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.327463 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:24 crc kubenswrapper[4792]: E1202 18:38:24.328177 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:24.828149335 +0000 UTC m=+135.601041703 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.330799 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.350779 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.370247 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.390208 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.415975 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.428492 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: 
\"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:38:24 crc kubenswrapper[4792]: E1202 18:38:24.428635 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:24.928617179 +0000 UTC m=+135.701509507 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.428850 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/95d5c817-e302-4f46-9db2-333b21486a7c-console-serving-cert\") pod \"console-f9d7485db-cds6s\" (UID: \"95d5c817-e302-4f46-9db2-333b21486a7c\") " pod="openshift-console/console-f9d7485db-cds6s" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.428874 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c0ecd9f5-ffc9-4a7a-8d6f-96781e7577ac-apiservice-cert\") pod \"packageserver-d55dfcdfc-ggtd2\" (UID: \"c0ecd9f5-ffc9-4a7a-8d6f-96781e7577ac\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ggtd2" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.428946 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/2f18da50-4c56-4f61-bcf4-583bec642127-plugins-dir\") pod \"csi-hostpathplugin-r6ncj\" (UID: \"2f18da50-4c56-4f61-bcf4-583bec642127\") " pod="hostpath-provisioner/csi-hostpathplugin-r6ncj" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.429000 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/649496fd-1b8f-429a-be02-160ecc5b2ab9-signing-cabundle\") pod \"service-ca-9c57cc56f-72k72\" (UID: \"649496fd-1b8f-429a-be02-160ecc5b2ab9\") " pod="openshift-service-ca/service-ca-9c57cc56f-72k72" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.429017 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/67b8fcaa-f575-4b23-8dbe-9c2402abba3d-config-volume\") pod \"dns-default-z9h9q\" (UID: \"67b8fcaa-f575-4b23-8dbe-9c2402abba3d\") " pod="openshift-dns/dns-default-z9h9q" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.429035 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3ed6603d-effa-496b-9ed3-16918cb79b7b-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-wdh2w\" (UID: \"3ed6603d-effa-496b-9ed3-16918cb79b7b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wdh2w" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.429078 4792 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/c586142b-c192-4706-9026-bcf666e8f7c6-installation-pull-secrets\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.429099 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xjxq7\" (UniqueName: \"kubernetes.io/projected/95d5c817-e302-4f46-9db2-333b21486a7c-kube-api-access-xjxq7\") pod \"console-f9d7485db-cds6s\" (UID: \"95d5c817-e302-4f46-9db2-333b21486a7c\") " pod="openshift-console/console-f9d7485db-cds6s" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.429115 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/67b8fcaa-f575-4b23-8dbe-9c2402abba3d-metrics-tls\") pod \"dns-default-z9h9q\" (UID: \"67b8fcaa-f575-4b23-8dbe-9c2402abba3d\") " pod="openshift-dns/dns-default-z9h9q" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.429135 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ff2b39c3-c4a3-4d87-b4bd-474d89c04db7-metrics-tls\") pod \"dns-operator-744455d44c-f89mv\" (UID: \"ff2b39c3-c4a3-4d87-b4bd-474d89c04db7\") " pod="openshift-dns-operator/dns-operator-744455d44c-f89mv" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.429152 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/c0ecd9f5-ffc9-4a7a-8d6f-96781e7577ac-tmpfs\") pod \"packageserver-d55dfcdfc-ggtd2\" (UID: \"c0ecd9f5-ffc9-4a7a-8d6f-96781e7577ac\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ggtd2" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.429172 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e690e93c-92e0-4045-88b5-685763786d3a-bound-sa-token\") pod \"ingress-operator-5b745b69d9-khg9t\" (UID: \"e690e93c-92e0-4045-88b5-685763786d3a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-khg9t" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.429193 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/90e317c4-1ff5-45ef-8f05-bbe33bda8434-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-pkwvd\" (UID: \"90e317c4-1ff5-45ef-8f05-bbe33bda8434\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pkwvd" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.429230 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ft28\" (UniqueName: \"kubernetes.io/projected/d4f401bf-9a39-4f1b-a24f-9f5db5a36e40-kube-api-access-5ft28\") pod \"openshift-controller-manager-operator-756b6f6bc6-bjntn\" (UID: \"d4f401bf-9a39-4f1b-a24f-9f5db5a36e40\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bjntn" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.429250 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q88jv\" (UniqueName: 
\"kubernetes.io/projected/93d19b8a-404d-4f49-8789-ad474970771a-kube-api-access-q88jv\") pod \"ingress-canary-tk4v8\" (UID: \"93d19b8a-404d-4f49-8789-ad474970771a\") " pod="openshift-ingress-canary/ingress-canary-tk4v8" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.429302 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tv9ls\" (UniqueName: \"kubernetes.io/projected/ab249412-8c3a-4e5f-b84a-374b19cc1dc9-kube-api-access-tv9ls\") pod \"marketplace-operator-79b997595-nzgzn\" (UID: \"ab249412-8c3a-4e5f-b84a-374b19cc1dc9\") " pod="openshift-marketplace/marketplace-operator-79b997595-nzgzn" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.429321 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/05a76a9b-9018-4c8c-a2c7-a83641ca60b7-metrics-certs\") pod \"router-default-5444994796-d9jsl\" (UID: \"05a76a9b-9018-4c8c-a2c7-a83641ca60b7\") " pod="openshift-ingress/router-default-5444994796-d9jsl" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.429340 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/64e0e779-21ef-489e-8721-54533e24bf31-secret-volume\") pod \"collect-profiles-29411670-9n2pw\" (UID: \"64e0e779-21ef-489e-8721-54533e24bf31\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411670-9n2pw" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.429363 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/805371d5-27c1-4895-aab2-23e8e1d9a91b-srv-cert\") pod \"olm-operator-6b444d44fb-7txmt\" (UID: \"805371d5-27c1-4895-aab2-23e8e1d9a91b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7txmt" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.429382 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8x69z\" (UniqueName: \"kubernetes.io/projected/9f54e180-97ba-4d10-9bda-7d38a5dab306-kube-api-access-8x69z\") pod \"service-ca-operator-777779d784-vf9nc\" (UID: \"9f54e180-97ba-4d10-9bda-7d38a5dab306\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-vf9nc" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.429401 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3ed6603d-effa-496b-9ed3-16918cb79b7b-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-wdh2w\" (UID: \"3ed6603d-effa-496b-9ed3-16918cb79b7b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wdh2w" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.429421 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rhcht\" (UniqueName: \"kubernetes.io/projected/cff41058-c666-4dc1-a119-54f157de50b9-kube-api-access-rhcht\") pod \"downloads-7954f5f757-jcpdc\" (UID: \"cff41058-c666-4dc1-a119-54f157de50b9\") " pod="openshift-console/downloads-7954f5f757-jcpdc" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.429439 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmwjw\" (UniqueName: 
\"kubernetes.io/projected/ae2be54d-c2f0-427a-bccd-c63bad0a0f0e-kube-api-access-nmwjw\") pod \"package-server-manager-789f6589d5-rfct2\" (UID: \"ae2be54d-c2f0-427a-bccd-c63bad0a0f0e\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rfct2" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.429481 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9cprg\" (UniqueName: \"kubernetes.io/projected/e690e93c-92e0-4045-88b5-685763786d3a-kube-api-access-9cprg\") pod \"ingress-operator-5b745b69d9-khg9t\" (UID: \"e690e93c-92e0-4045-88b5-685763786d3a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-khg9t" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.429500 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/93d19b8a-404d-4f49-8789-ad474970771a-cert\") pod \"ingress-canary-tk4v8\" (UID: \"93d19b8a-404d-4f49-8789-ad474970771a\") " pod="openshift-ingress-canary/ingress-canary-tk4v8" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.429536 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mjclj\" (UniqueName: \"kubernetes.io/projected/64e0e779-21ef-489e-8721-54533e24bf31-kube-api-access-mjclj\") pod \"collect-profiles-29411670-9n2pw\" (UID: \"64e0e779-21ef-489e-8721-54533e24bf31\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411670-9n2pw" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.429556 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c586142b-c192-4706-9026-bcf666e8f7c6-bound-sa-token\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.429583 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qd2gp\" (UniqueName: \"kubernetes.io/projected/455642ee-8453-433f-a998-ea30f5baadef-kube-api-access-qd2gp\") pod \"migrator-59844c95c7-r4dvq\" (UID: \"455642ee-8453-433f-a998-ea30f5baadef\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-r4dvq" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.429604 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d4f401bf-9a39-4f1b-a24f-9f5db5a36e40-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-bjntn\" (UID: \"d4f401bf-9a39-4f1b-a24f-9f5db5a36e40\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bjntn" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.429624 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/2f18da50-4c56-4f61-bcf4-583bec642127-mountpoint-dir\") pod \"csi-hostpathplugin-r6ncj\" (UID: \"2f18da50-4c56-4f61-bcf4-583bec642127\") " pod="hostpath-provisioner/csi-hostpathplugin-r6ncj" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.429663 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2h4lm\" (UniqueName: 
\"kubernetes.io/projected/2f18da50-4c56-4f61-bcf4-583bec642127-kube-api-access-2h4lm\") pod \"csi-hostpathplugin-r6ncj\" (UID: \"2f18da50-4c56-4f61-bcf4-583bec642127\") " pod="hostpath-provisioner/csi-hostpathplugin-r6ncj" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.429683 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44m86\" (UniqueName: \"kubernetes.io/projected/9053d7ed-27f1-470a-8164-6ef32c05ea87-kube-api-access-44m86\") pod \"control-plane-machine-set-operator-78cbb6b69f-phfft\" (UID: \"9053d7ed-27f1-470a-8164-6ef32c05ea87\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-phfft" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.429700 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-67v7d\" (UniqueName: \"kubernetes.io/projected/649496fd-1b8f-429a-be02-160ecc5b2ab9-kube-api-access-67v7d\") pod \"service-ca-9c57cc56f-72k72\" (UID: \"649496fd-1b8f-429a-be02-160ecc5b2ab9\") " pod="openshift-service-ca/service-ca-9c57cc56f-72k72" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.429721 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/95d5c817-e302-4f46-9db2-333b21486a7c-trusted-ca-bundle\") pod \"console-f9d7485db-cds6s\" (UID: \"95d5c817-e302-4f46-9db2-333b21486a7c\") " pod="openshift-console/console-f9d7485db-cds6s" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.429752 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/c586142b-c192-4706-9026-bcf666e8f7c6-registry-certificates\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.429773 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cpwrv\" (UniqueName: \"kubernetes.io/projected/c586142b-c192-4706-9026-bcf666e8f7c6-kube-api-access-cpwrv\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.429793 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e48d276e-2b89-494b-bbf6-8471a336a7a0-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-wpgk7\" (UID: \"e48d276e-2b89-494b-bbf6-8471a336a7a0\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wpgk7" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.429811 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/95d5c817-e302-4f46-9db2-333b21486a7c-console-config\") pod \"console-f9d7485db-cds6s\" (UID: \"95d5c817-e302-4f46-9db2-333b21486a7c\") " pod="openshift-console/console-f9d7485db-cds6s" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.429844 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a3026143-1986-44ad-b5e7-fbae179a6503-mcc-auth-proxy-config\") pod 
\"machine-config-controller-84d6567774-lspv9\" (UID: \"a3026143-1986-44ad-b5e7-fbae179a6503\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-lspv9" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.429881 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e1bc1adc-a9c6-42a0-a606-ef182242e6ef-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-w48fc\" (UID: \"e1bc1adc-a9c6-42a0-a606-ef182242e6ef\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-w48fc" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.429896 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e690e93c-92e0-4045-88b5-685763786d3a-metrics-tls\") pod \"ingress-operator-5b745b69d9-khg9t\" (UID: \"e690e93c-92e0-4045-88b5-685763786d3a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-khg9t" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.429931 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q75cx\" (UniqueName: \"kubernetes.io/projected/788d1186-ade1-4f2c-acaa-e0030baa277c-kube-api-access-q75cx\") pod \"multus-admission-controller-857f4d67dd-zs9kz\" (UID: \"788d1186-ade1-4f2c-acaa-e0030baa277c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-zs9kz" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.429950 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5td6\" (UniqueName: \"kubernetes.io/projected/805371d5-27c1-4895-aab2-23e8e1d9a91b-kube-api-access-r5td6\") pod \"olm-operator-6b444d44fb-7txmt\" (UID: \"805371d5-27c1-4895-aab2-23e8e1d9a91b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7txmt" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.429966 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxznh\" (UniqueName: \"kubernetes.io/projected/c0ecd9f5-ffc9-4a7a-8d6f-96781e7577ac-kube-api-access-hxznh\") pod \"packageserver-d55dfcdfc-ggtd2\" (UID: \"c0ecd9f5-ffc9-4a7a-8d6f-96781e7577ac\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ggtd2" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.429983 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fpkhp\" (UniqueName: \"kubernetes.io/projected/3ed6603d-effa-496b-9ed3-16918cb79b7b-kube-api-access-fpkhp\") pod \"kube-storage-version-migrator-operator-b67b599dd-wdh2w\" (UID: \"3ed6603d-effa-496b-9ed3-16918cb79b7b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wdh2w" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.430002 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/39dbc3d9-e001-4ba3-af2d-e19a2016bbeb-serving-cert\") pod \"controller-manager-879f6c89f-f87xj\" (UID: \"39dbc3d9-e001-4ba3-af2d-e19a2016bbeb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-f87xj" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.430023 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/8774a5f7-7c2b-4bee-843c-ae87c78f7dc3-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-5gd9z\" (UID: \"8774a5f7-7c2b-4bee-843c-ae87c78f7dc3\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-5gd9z" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.430047 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/788d1186-ade1-4f2c-acaa-e0030baa277c-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-zs9kz\" (UID: \"788d1186-ade1-4f2c-acaa-e0030baa277c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-zs9kz" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.430076 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/1d8206ee-7365-4b1d-8108-6c4626ed7bc3-node-bootstrap-token\") pod \"machine-config-server-qjpwp\" (UID: \"1d8206ee-7365-4b1d-8108-6c4626ed7bc3\") " pod="openshift-machine-config-operator/machine-config-server-qjpwp" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.430093 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/90e317c4-1ff5-45ef-8f05-bbe33bda8434-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-pkwvd\" (UID: \"90e317c4-1ff5-45ef-8f05-bbe33bda8434\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pkwvd" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.430110 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/39dbc3d9-e001-4ba3-af2d-e19a2016bbeb-client-ca\") pod \"controller-manager-879f6c89f-f87xj\" (UID: \"39dbc3d9-e001-4ba3-af2d-e19a2016bbeb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-f87xj" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.430127 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kncrq\" (UniqueName: \"kubernetes.io/projected/39dbc3d9-e001-4ba3-af2d-e19a2016bbeb-kube-api-access-kncrq\") pod \"controller-manager-879f6c89f-f87xj\" (UID: \"39dbc3d9-e001-4ba3-af2d-e19a2016bbeb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-f87xj" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.430173 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f54e180-97ba-4d10-9bda-7d38a5dab306-config\") pod \"service-ca-operator-777779d784-vf9nc\" (UID: \"9f54e180-97ba-4d10-9bda-7d38a5dab306\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-vf9nc" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.430191 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dlt8b\" (UniqueName: \"kubernetes.io/projected/1d8206ee-7365-4b1d-8108-6c4626ed7bc3-kube-api-access-dlt8b\") pod \"machine-config-server-qjpwp\" (UID: \"1d8206ee-7365-4b1d-8108-6c4626ed7bc3\") " pod="openshift-machine-config-operator/machine-config-server-qjpwp" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.430220 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"certs\" (UniqueName: \"kubernetes.io/secret/1d8206ee-7365-4b1d-8108-6c4626ed7bc3-certs\") pod \"machine-config-server-qjpwp\" (UID: \"1d8206ee-7365-4b1d-8108-6c4626ed7bc3\") " pod="openshift-machine-config-operator/machine-config-server-qjpwp" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.430240 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4f401bf-9a39-4f1b-a24f-9f5db5a36e40-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-bjntn\" (UID: \"d4f401bf-9a39-4f1b-a24f-9f5db5a36e40\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bjntn" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.430259 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.430259 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e1bc1adc-a9c6-42a0-a606-ef182242e6ef-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-w48fc\" (UID: \"e1bc1adc-a9c6-42a0-a606-ef182242e6ef\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-w48fc" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.430449 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/2f18da50-4c56-4f61-bcf4-583bec642127-registration-dir\") pod \"csi-hostpathplugin-r6ncj\" (UID: \"2f18da50-4c56-4f61-bcf4-583bec642127\") " pod="hostpath-provisioner/csi-hostpathplugin-r6ncj" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.430501 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/c586142b-c192-4706-9026-bcf666e8f7c6-registry-tls\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.430547 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/5fede595-7ee5-4aaf-999d-45eebf4ca097-available-featuregates\") pod \"openshift-config-operator-7777fb866f-nplg6\" (UID: \"5fede595-7ee5-4aaf-999d-45eebf4ca097\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-nplg6" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.430573 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/a3026143-1986-44ad-b5e7-fbae179a6503-proxy-tls\") pod \"machine-config-controller-84d6567774-lspv9\" (UID: \"a3026143-1986-44ad-b5e7-fbae179a6503\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-lspv9" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.430591 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/805371d5-27c1-4895-aab2-23e8e1d9a91b-profile-collector-cert\") pod \"olm-operator-6b444d44fb-7txmt\" (UID: \"805371d5-27c1-4895-aab2-23e8e1d9a91b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7txmt" Dec 02 
18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.430610 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e48d276e-2b89-494b-bbf6-8471a336a7a0-config\") pod \"kube-apiserver-operator-766d6c64bb-wpgk7\" (UID: \"e48d276e-2b89-494b-bbf6-8471a336a7a0\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wpgk7" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.430629 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bk68j\" (UniqueName: \"kubernetes.io/projected/90e317c4-1ff5-45ef-8f05-bbe33bda8434-kube-api-access-bk68j\") pod \"cluster-image-registry-operator-dc59b4c8b-pkwvd\" (UID: \"90e317c4-1ff5-45ef-8f05-bbe33bda8434\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pkwvd" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.430648 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e690e93c-92e0-4045-88b5-685763786d3a-trusted-ca\") pod \"ingress-operator-5b745b69d9-khg9t\" (UID: \"e690e93c-92e0-4045-88b5-685763786d3a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-khg9t" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.430689 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/95d5c817-e302-4f46-9db2-333b21486a7c-console-oauth-config\") pod \"console-f9d7485db-cds6s\" (UID: \"95d5c817-e302-4f46-9db2-333b21486a7c\") " pod="openshift-console/console-f9d7485db-cds6s" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.431224 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/9053d7ed-27f1-470a-8164-6ef32c05ea87-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-phfft\" (UID: \"9053d7ed-27f1-470a-8164-6ef32c05ea87\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-phfft" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.431259 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.431278 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39dbc3d9-e001-4ba3-af2d-e19a2016bbeb-config\") pod \"controller-manager-879f6c89f-f87xj\" (UID: \"39dbc3d9-e001-4ba3-af2d-e19a2016bbeb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-f87xj" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.431306 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8774a5f7-7c2b-4bee-843c-ae87c78f7dc3-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-5gd9z\" (UID: \"8774a5f7-7c2b-4bee-843c-ae87c78f7dc3\") " 
pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-5gd9z" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.431348 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c0ecd9f5-ffc9-4a7a-8d6f-96781e7577ac-webhook-cert\") pod \"packageserver-d55dfcdfc-ggtd2\" (UID: \"c0ecd9f5-ffc9-4a7a-8d6f-96781e7577ac\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ggtd2" Dec 02 18:38:24 crc kubenswrapper[4792]: E1202 18:38:24.431856 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:24.931848056 +0000 UTC m=+135.704740384 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.432335 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/64e0e779-21ef-489e-8721-54533e24bf31-config-volume\") pod \"collect-profiles-29411670-9n2pw\" (UID: \"64e0e779-21ef-489e-8721-54533e24bf31\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411670-9n2pw" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.432367 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/c586142b-c192-4706-9026-bcf666e8f7c6-registry-certificates\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.432370 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/c586142b-c192-4706-9026-bcf666e8f7c6-ca-trust-extracted\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.432420 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5bhzl\" (UniqueName: \"kubernetes.io/projected/a3026143-1986-44ad-b5e7-fbae179a6503-kube-api-access-5bhzl\") pod \"machine-config-controller-84d6567774-lspv9\" (UID: \"a3026143-1986-44ad-b5e7-fbae179a6503\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-lspv9" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.432438 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8774a5f7-7c2b-4bee-843c-ae87c78f7dc3-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-5gd9z\" (UID: \"8774a5f7-7c2b-4bee-843c-ae87c78f7dc3\") " 
pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-5gd9z" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.432457 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/90e317c4-1ff5-45ef-8f05-bbe33bda8434-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-pkwvd\" (UID: \"90e317c4-1ff5-45ef-8f05-bbe33bda8434\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pkwvd" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.432479 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ab249412-8c3a-4e5f-b84a-374b19cc1dc9-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-nzgzn\" (UID: \"ab249412-8c3a-4e5f-b84a-374b19cc1dc9\") " pod="openshift-marketplace/marketplace-operator-79b997595-nzgzn" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.432572 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/05a76a9b-9018-4c8c-a2c7-a83641ca60b7-service-ca-bundle\") pod \"router-default-5444994796-d9jsl\" (UID: \"05a76a9b-9018-4c8c-a2c7-a83641ca60b7\") " pod="openshift-ingress/router-default-5444994796-d9jsl" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.432622 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gqjb9\" (UniqueName: \"kubernetes.io/projected/67b8fcaa-f575-4b23-8dbe-9c2402abba3d-kube-api-access-gqjb9\") pod \"dns-default-z9h9q\" (UID: \"67b8fcaa-f575-4b23-8dbe-9c2402abba3d\") " pod="openshift-dns/dns-default-z9h9q" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.432647 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/c586142b-c192-4706-9026-bcf666e8f7c6-ca-trust-extracted\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.432662 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e48d276e-2b89-494b-bbf6-8471a336a7a0-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-wpgk7\" (UID: \"e48d276e-2b89-494b-bbf6-8471a336a7a0\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wpgk7" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.432678 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/649496fd-1b8f-429a-be02-160ecc5b2ab9-signing-key\") pod \"service-ca-9c57cc56f-72k72\" (UID: \"649496fd-1b8f-429a-be02-160ecc5b2ab9\") " pod="openshift-service-ca/service-ca-9c57cc56f-72k72" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.432696 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1bc1adc-a9c6-42a0-a606-ef182242e6ef-config\") pod \"kube-controller-manager-operator-78b949d7b-w48fc\" (UID: \"e1bc1adc-a9c6-42a0-a606-ef182242e6ef\") " 
pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-w48fc" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.432712 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/ab249412-8c3a-4e5f-b84a-374b19cc1dc9-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-nzgzn\" (UID: \"ab249412-8c3a-4e5f-b84a-374b19cc1dc9\") " pod="openshift-marketplace/marketplace-operator-79b997595-nzgzn" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.432730 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5fede595-7ee5-4aaf-999d-45eebf4ca097-serving-cert\") pod \"openshift-config-operator-7777fb866f-nplg6\" (UID: \"5fede595-7ee5-4aaf-999d-45eebf4ca097\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-nplg6" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.432822 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2mb46\" (UniqueName: \"kubernetes.io/projected/ff2b39c3-c4a3-4d87-b4bd-474d89c04db7-kube-api-access-2mb46\") pod \"dns-operator-744455d44c-f89mv\" (UID: \"ff2b39c3-c4a3-4d87-b4bd-474d89c04db7\") " pod="openshift-dns-operator/dns-operator-744455d44c-f89mv" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.432840 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/95d5c817-e302-4f46-9db2-333b21486a7c-oauth-serving-cert\") pod \"console-f9d7485db-cds6s\" (UID: \"95d5c817-e302-4f46-9db2-333b21486a7c\") " pod="openshift-console/console-f9d7485db-cds6s" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.432888 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/95d5c817-e302-4f46-9db2-333b21486a7c-service-ca\") pod \"console-f9d7485db-cds6s\" (UID: \"95d5c817-e302-4f46-9db2-333b21486a7c\") " pod="openshift-console/console-f9d7485db-cds6s" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.432905 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9f54e180-97ba-4d10-9bda-7d38a5dab306-serving-cert\") pod \"service-ca-operator-777779d784-vf9nc\" (UID: \"9f54e180-97ba-4d10-9bda-7d38a5dab306\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-vf9nc" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.432950 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9txdc\" (UniqueName: \"kubernetes.io/projected/05a76a9b-9018-4c8c-a2c7-a83641ca60b7-kube-api-access-9txdc\") pod \"router-default-5444994796-d9jsl\" (UID: \"05a76a9b-9018-4c8c-a2c7-a83641ca60b7\") " pod="openshift-ingress/router-default-5444994796-d9jsl" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.432984 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c586142b-c192-4706-9026-bcf666e8f7c6-trusted-ca\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.433006 
4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/39dbc3d9-e001-4ba3-af2d-e19a2016bbeb-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-f87xj\" (UID: \"39dbc3d9-e001-4ba3-af2d-e19a2016bbeb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-f87xj" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.433051 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gd9cd\" (UniqueName: \"kubernetes.io/projected/5fede595-7ee5-4aaf-999d-45eebf4ca097-kube-api-access-gd9cd\") pod \"openshift-config-operator-7777fb866f-nplg6\" (UID: \"5fede595-7ee5-4aaf-999d-45eebf4ca097\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-nplg6" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.433072 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/05a76a9b-9018-4c8c-a2c7-a83641ca60b7-stats-auth\") pod \"router-default-5444994796-d9jsl\" (UID: \"05a76a9b-9018-4c8c-a2c7-a83641ca60b7\") " pod="openshift-ingress/router-default-5444994796-d9jsl" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.434114 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/5fede595-7ee5-4aaf-999d-45eebf4ca097-available-featuregates\") pod \"openshift-config-operator-7777fb866f-nplg6\" (UID: \"5fede595-7ee5-4aaf-999d-45eebf4ca097\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-nplg6" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.438069 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/ae2be54d-c2f0-427a-bccd-c63bad0a0f0e-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-rfct2\" (UID: \"ae2be54d-c2f0-427a-bccd-c63bad0a0f0e\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rfct2" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.438200 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/2f18da50-4c56-4f61-bcf4-583bec642127-socket-dir\") pod \"csi-hostpathplugin-r6ncj\" (UID: \"2f18da50-4c56-4f61-bcf4-583bec642127\") " pod="hostpath-provisioner/csi-hostpathplugin-r6ncj" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.438284 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/2f18da50-4c56-4f61-bcf4-583bec642127-csi-data-dir\") pod \"csi-hostpathplugin-r6ncj\" (UID: \"2f18da50-4c56-4f61-bcf4-583bec642127\") " pod="hostpath-provisioner/csi-hostpathplugin-r6ncj" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.438409 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/05a76a9b-9018-4c8c-a2c7-a83641ca60b7-default-certificate\") pod \"router-default-5444994796-d9jsl\" (UID: \"05a76a9b-9018-4c8c-a2c7-a83641ca60b7\") " pod="openshift-ingress/router-default-5444994796-d9jsl" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.438755 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c586142b-c192-4706-9026-bcf666e8f7c6-trusted-ca\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.438791 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/c586142b-c192-4706-9026-bcf666e8f7c6-registry-tls\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.439443 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ff2b39c3-c4a3-4d87-b4bd-474d89c04db7-metrics-tls\") pod \"dns-operator-744455d44c-f89mv\" (UID: \"ff2b39c3-c4a3-4d87-b4bd-474d89c04db7\") " pod="openshift-dns-operator/dns-operator-744455d44c-f89mv" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.441885 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5fede595-7ee5-4aaf-999d-45eebf4ca097-serving-cert\") pod \"openshift-config-operator-7777fb866f-nplg6\" (UID: \"5fede595-7ee5-4aaf-999d-45eebf4ca097\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-nplg6" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.443934 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/c586142b-c192-4706-9026-bcf666e8f7c6-installation-pull-secrets\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.450490 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.471304 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.491897 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.510314 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.537772 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.539657 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:38:24 crc kubenswrapper[4792]: E1202 18:38:24.539911 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:25.039873612 +0000 UTC m=+135.812765971 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.540056 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/95d5c817-e302-4f46-9db2-333b21486a7c-console-oauth-config\") pod \"console-f9d7485db-cds6s\" (UID: \"95d5c817-e302-4f46-9db2-333b21486a7c\") " pod="openshift-console/console-f9d7485db-cds6s" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.540102 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/9053d7ed-27f1-470a-8164-6ef32c05ea87-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-phfft\" (UID: \"9053d7ed-27f1-470a-8164-6ef32c05ea87\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-phfft" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.540154 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.540213 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39dbc3d9-e001-4ba3-af2d-e19a2016bbeb-config\") pod \"controller-manager-879f6c89f-f87xj\" (UID: \"39dbc3d9-e001-4ba3-af2d-e19a2016bbeb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-f87xj" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.540255 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8774a5f7-7c2b-4bee-843c-ae87c78f7dc3-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-5gd9z\" (UID: \"8774a5f7-7c2b-4bee-843c-ae87c78f7dc3\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-5gd9z" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.540289 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c0ecd9f5-ffc9-4a7a-8d6f-96781e7577ac-webhook-cert\") pod \"packageserver-d55dfcdfc-ggtd2\" (UID: \"c0ecd9f5-ffc9-4a7a-8d6f-96781e7577ac\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ggtd2" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.540326 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/64e0e779-21ef-489e-8721-54533e24bf31-config-volume\") pod \"collect-profiles-29411670-9n2pw\" (UID: \"64e0e779-21ef-489e-8721-54533e24bf31\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411670-9n2pw" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.540362 4792 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ab249412-8c3a-4e5f-b84a-374b19cc1dc9-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-nzgzn\" (UID: \"ab249412-8c3a-4e5f-b84a-374b19cc1dc9\") " pod="openshift-marketplace/marketplace-operator-79b997595-nzgzn" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.540468 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5bhzl\" (UniqueName: \"kubernetes.io/projected/a3026143-1986-44ad-b5e7-fbae179a6503-kube-api-access-5bhzl\") pod \"machine-config-controller-84d6567774-lspv9\" (UID: \"a3026143-1986-44ad-b5e7-fbae179a6503\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-lspv9" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.540505 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8774a5f7-7c2b-4bee-843c-ae87c78f7dc3-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-5gd9z\" (UID: \"8774a5f7-7c2b-4bee-843c-ae87c78f7dc3\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-5gd9z" Dec 02 18:38:24 crc kubenswrapper[4792]: E1202 18:38:24.540553 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:25.04053843 +0000 UTC m=+135.813430758 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.540666 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/90e317c4-1ff5-45ef-8f05-bbe33bda8434-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-pkwvd\" (UID: \"90e317c4-1ff5-45ef-8f05-bbe33bda8434\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pkwvd" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.540695 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/05a76a9b-9018-4c8c-a2c7-a83641ca60b7-service-ca-bundle\") pod \"router-default-5444994796-d9jsl\" (UID: \"05a76a9b-9018-4c8c-a2c7-a83641ca60b7\") " pod="openshift-ingress/router-default-5444994796-d9jsl" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.540714 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gqjb9\" (UniqueName: \"kubernetes.io/projected/67b8fcaa-f575-4b23-8dbe-9c2402abba3d-kube-api-access-gqjb9\") pod \"dns-default-z9h9q\" (UID: \"67b8fcaa-f575-4b23-8dbe-9c2402abba3d\") " pod="openshift-dns/dns-default-z9h9q" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.540744 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/e48d276e-2b89-494b-bbf6-8471a336a7a0-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-wpgk7\" (UID: \"e48d276e-2b89-494b-bbf6-8471a336a7a0\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wpgk7" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.540762 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/649496fd-1b8f-429a-be02-160ecc5b2ab9-signing-key\") pod \"service-ca-9c57cc56f-72k72\" (UID: \"649496fd-1b8f-429a-be02-160ecc5b2ab9\") " pod="openshift-service-ca/service-ca-9c57cc56f-72k72" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.540785 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1bc1adc-a9c6-42a0-a606-ef182242e6ef-config\") pod \"kube-controller-manager-operator-78b949d7b-w48fc\" (UID: \"e1bc1adc-a9c6-42a0-a606-ef182242e6ef\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-w48fc" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.540809 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/ab249412-8c3a-4e5f-b84a-374b19cc1dc9-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-nzgzn\" (UID: \"ab249412-8c3a-4e5f-b84a-374b19cc1dc9\") " pod="openshift-marketplace/marketplace-operator-79b997595-nzgzn" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.540843 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/95d5c817-e302-4f46-9db2-333b21486a7c-oauth-serving-cert\") pod \"console-f9d7485db-cds6s\" (UID: \"95d5c817-e302-4f46-9db2-333b21486a7c\") " pod="openshift-console/console-f9d7485db-cds6s" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.540874 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/95d5c817-e302-4f46-9db2-333b21486a7c-service-ca\") pod \"console-f9d7485db-cds6s\" (UID: \"95d5c817-e302-4f46-9db2-333b21486a7c\") " pod="openshift-console/console-f9d7485db-cds6s" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.540892 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9f54e180-97ba-4d10-9bda-7d38a5dab306-serving-cert\") pod \"service-ca-operator-777779d784-vf9nc\" (UID: \"9f54e180-97ba-4d10-9bda-7d38a5dab306\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-vf9nc" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.540919 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9txdc\" (UniqueName: \"kubernetes.io/projected/05a76a9b-9018-4c8c-a2c7-a83641ca60b7-kube-api-access-9txdc\") pod \"router-default-5444994796-d9jsl\" (UID: \"05a76a9b-9018-4c8c-a2c7-a83641ca60b7\") " pod="openshift-ingress/router-default-5444994796-d9jsl" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.540936 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/39dbc3d9-e001-4ba3-af2d-e19a2016bbeb-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-f87xj\" (UID: \"39dbc3d9-e001-4ba3-af2d-e19a2016bbeb\") " 
pod="openshift-controller-manager/controller-manager-879f6c89f-f87xj" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.540968 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/05a76a9b-9018-4c8c-a2c7-a83641ca60b7-stats-auth\") pod \"router-default-5444994796-d9jsl\" (UID: \"05a76a9b-9018-4c8c-a2c7-a83641ca60b7\") " pod="openshift-ingress/router-default-5444994796-d9jsl" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.540987 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/ae2be54d-c2f0-427a-bccd-c63bad0a0f0e-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-rfct2\" (UID: \"ae2be54d-c2f0-427a-bccd-c63bad0a0f0e\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rfct2" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.541005 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/2f18da50-4c56-4f61-bcf4-583bec642127-socket-dir\") pod \"csi-hostpathplugin-r6ncj\" (UID: \"2f18da50-4c56-4f61-bcf4-583bec642127\") " pod="hostpath-provisioner/csi-hostpathplugin-r6ncj" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.541021 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/2f18da50-4c56-4f61-bcf4-583bec642127-csi-data-dir\") pod \"csi-hostpathplugin-r6ncj\" (UID: \"2f18da50-4c56-4f61-bcf4-583bec642127\") " pod="hostpath-provisioner/csi-hostpathplugin-r6ncj" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.541040 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/05a76a9b-9018-4c8c-a2c7-a83641ca60b7-default-certificate\") pod \"router-default-5444994796-d9jsl\" (UID: \"05a76a9b-9018-4c8c-a2c7-a83641ca60b7\") " pod="openshift-ingress/router-default-5444994796-d9jsl" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.541088 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/95d5c817-e302-4f46-9db2-333b21486a7c-console-serving-cert\") pod \"console-f9d7485db-cds6s\" (UID: \"95d5c817-e302-4f46-9db2-333b21486a7c\") " pod="openshift-console/console-f9d7485db-cds6s" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.541105 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c0ecd9f5-ffc9-4a7a-8d6f-96781e7577ac-apiservice-cert\") pod \"packageserver-d55dfcdfc-ggtd2\" (UID: \"c0ecd9f5-ffc9-4a7a-8d6f-96781e7577ac\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ggtd2" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.541128 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/2f18da50-4c56-4f61-bcf4-583bec642127-plugins-dir\") pod \"csi-hostpathplugin-r6ncj\" (UID: \"2f18da50-4c56-4f61-bcf4-583bec642127\") " pod="hostpath-provisioner/csi-hostpathplugin-r6ncj" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.541146 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: 
\"kubernetes.io/configmap/649496fd-1b8f-429a-be02-160ecc5b2ab9-signing-cabundle\") pod \"service-ca-9c57cc56f-72k72\" (UID: \"649496fd-1b8f-429a-be02-160ecc5b2ab9\") " pod="openshift-service-ca/service-ca-9c57cc56f-72k72" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.541162 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/67b8fcaa-f575-4b23-8dbe-9c2402abba3d-config-volume\") pod \"dns-default-z9h9q\" (UID: \"67b8fcaa-f575-4b23-8dbe-9c2402abba3d\") " pod="openshift-dns/dns-default-z9h9q" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.541179 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3ed6603d-effa-496b-9ed3-16918cb79b7b-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-wdh2w\" (UID: \"3ed6603d-effa-496b-9ed3-16918cb79b7b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wdh2w" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.541221 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjxq7\" (UniqueName: \"kubernetes.io/projected/95d5c817-e302-4f46-9db2-333b21486a7c-kube-api-access-xjxq7\") pod \"console-f9d7485db-cds6s\" (UID: \"95d5c817-e302-4f46-9db2-333b21486a7c\") " pod="openshift-console/console-f9d7485db-cds6s" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.541239 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/67b8fcaa-f575-4b23-8dbe-9c2402abba3d-metrics-tls\") pod \"dns-default-z9h9q\" (UID: \"67b8fcaa-f575-4b23-8dbe-9c2402abba3d\") " pod="openshift-dns/dns-default-z9h9q" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.541257 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/c0ecd9f5-ffc9-4a7a-8d6f-96781e7577ac-tmpfs\") pod \"packageserver-d55dfcdfc-ggtd2\" (UID: \"c0ecd9f5-ffc9-4a7a-8d6f-96781e7577ac\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ggtd2" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.541274 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e690e93c-92e0-4045-88b5-685763786d3a-bound-sa-token\") pod \"ingress-operator-5b745b69d9-khg9t\" (UID: \"e690e93c-92e0-4045-88b5-685763786d3a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-khg9t" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.541291 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/90e317c4-1ff5-45ef-8f05-bbe33bda8434-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-pkwvd\" (UID: \"90e317c4-1ff5-45ef-8f05-bbe33bda8434\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pkwvd" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.541311 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ft28\" (UniqueName: \"kubernetes.io/projected/d4f401bf-9a39-4f1b-a24f-9f5db5a36e40-kube-api-access-5ft28\") pod \"openshift-controller-manager-operator-756b6f6bc6-bjntn\" (UID: \"d4f401bf-9a39-4f1b-a24f-9f5db5a36e40\") " 
pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bjntn" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.541332 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q88jv\" (UniqueName: \"kubernetes.io/projected/93d19b8a-404d-4f49-8789-ad474970771a-kube-api-access-q88jv\") pod \"ingress-canary-tk4v8\" (UID: \"93d19b8a-404d-4f49-8789-ad474970771a\") " pod="openshift-ingress-canary/ingress-canary-tk4v8" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.541351 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tv9ls\" (UniqueName: \"kubernetes.io/projected/ab249412-8c3a-4e5f-b84a-374b19cc1dc9-kube-api-access-tv9ls\") pod \"marketplace-operator-79b997595-nzgzn\" (UID: \"ab249412-8c3a-4e5f-b84a-374b19cc1dc9\") " pod="openshift-marketplace/marketplace-operator-79b997595-nzgzn" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.541376 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/05a76a9b-9018-4c8c-a2c7-a83641ca60b7-metrics-certs\") pod \"router-default-5444994796-d9jsl\" (UID: \"05a76a9b-9018-4c8c-a2c7-a83641ca60b7\") " pod="openshift-ingress/router-default-5444994796-d9jsl" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.541394 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/64e0e779-21ef-489e-8721-54533e24bf31-secret-volume\") pod \"collect-profiles-29411670-9n2pw\" (UID: \"64e0e779-21ef-489e-8721-54533e24bf31\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411670-9n2pw" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.541412 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/805371d5-27c1-4895-aab2-23e8e1d9a91b-srv-cert\") pod \"olm-operator-6b444d44fb-7txmt\" (UID: \"805371d5-27c1-4895-aab2-23e8e1d9a91b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7txmt" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.541435 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8x69z\" (UniqueName: \"kubernetes.io/projected/9f54e180-97ba-4d10-9bda-7d38a5dab306-kube-api-access-8x69z\") pod \"service-ca-operator-777779d784-vf9nc\" (UID: \"9f54e180-97ba-4d10-9bda-7d38a5dab306\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-vf9nc" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.541458 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rhcht\" (UniqueName: \"kubernetes.io/projected/cff41058-c666-4dc1-a119-54f157de50b9-kube-api-access-rhcht\") pod \"downloads-7954f5f757-jcpdc\" (UID: \"cff41058-c666-4dc1-a119-54f157de50b9\") " pod="openshift-console/downloads-7954f5f757-jcpdc" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.541481 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3ed6603d-effa-496b-9ed3-16918cb79b7b-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-wdh2w\" (UID: \"3ed6603d-effa-496b-9ed3-16918cb79b7b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wdh2w" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.541504 4792 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmwjw\" (UniqueName: \"kubernetes.io/projected/ae2be54d-c2f0-427a-bccd-c63bad0a0f0e-kube-api-access-nmwjw\") pod \"package-server-manager-789f6589d5-rfct2\" (UID: \"ae2be54d-c2f0-427a-bccd-c63bad0a0f0e\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rfct2" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.541550 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9cprg\" (UniqueName: \"kubernetes.io/projected/e690e93c-92e0-4045-88b5-685763786d3a-kube-api-access-9cprg\") pod \"ingress-operator-5b745b69d9-khg9t\" (UID: \"e690e93c-92e0-4045-88b5-685763786d3a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-khg9t" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.541576 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/93d19b8a-404d-4f49-8789-ad474970771a-cert\") pod \"ingress-canary-tk4v8\" (UID: \"93d19b8a-404d-4f49-8789-ad474970771a\") " pod="openshift-ingress-canary/ingress-canary-tk4v8" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.541605 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mjclj\" (UniqueName: \"kubernetes.io/projected/64e0e779-21ef-489e-8721-54533e24bf31-kube-api-access-mjclj\") pod \"collect-profiles-29411670-9n2pw\" (UID: \"64e0e779-21ef-489e-8721-54533e24bf31\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411670-9n2pw" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.541648 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qd2gp\" (UniqueName: \"kubernetes.io/projected/455642ee-8453-433f-a998-ea30f5baadef-kube-api-access-qd2gp\") pod \"migrator-59844c95c7-r4dvq\" (UID: \"455642ee-8453-433f-a998-ea30f5baadef\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-r4dvq" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.541673 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d4f401bf-9a39-4f1b-a24f-9f5db5a36e40-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-bjntn\" (UID: \"d4f401bf-9a39-4f1b-a24f-9f5db5a36e40\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bjntn" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.541694 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/2f18da50-4c56-4f61-bcf4-583bec642127-mountpoint-dir\") pod \"csi-hostpathplugin-r6ncj\" (UID: \"2f18da50-4c56-4f61-bcf4-583bec642127\") " pod="hostpath-provisioner/csi-hostpathplugin-r6ncj" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.541717 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2h4lm\" (UniqueName: \"kubernetes.io/projected/2f18da50-4c56-4f61-bcf4-583bec642127-kube-api-access-2h4lm\") pod \"csi-hostpathplugin-r6ncj\" (UID: \"2f18da50-4c56-4f61-bcf4-583bec642127\") " pod="hostpath-provisioner/csi-hostpathplugin-r6ncj" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.541740 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/95d5c817-e302-4f46-9db2-333b21486a7c-trusted-ca-bundle\") pod \"console-f9d7485db-cds6s\" (UID: \"95d5c817-e302-4f46-9db2-333b21486a7c\") " pod="openshift-console/console-f9d7485db-cds6s" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.541766 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44m86\" (UniqueName: \"kubernetes.io/projected/9053d7ed-27f1-470a-8164-6ef32c05ea87-kube-api-access-44m86\") pod \"control-plane-machine-set-operator-78cbb6b69f-phfft\" (UID: \"9053d7ed-27f1-470a-8164-6ef32c05ea87\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-phfft" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.541791 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-67v7d\" (UniqueName: \"kubernetes.io/projected/649496fd-1b8f-429a-be02-160ecc5b2ab9-kube-api-access-67v7d\") pod \"service-ca-9c57cc56f-72k72\" (UID: \"649496fd-1b8f-429a-be02-160ecc5b2ab9\") " pod="openshift-service-ca/service-ca-9c57cc56f-72k72" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.541799 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/2f18da50-4c56-4f61-bcf4-583bec642127-plugins-dir\") pod \"csi-hostpathplugin-r6ncj\" (UID: \"2f18da50-4c56-4f61-bcf4-583bec642127\") " pod="hostpath-provisioner/csi-hostpathplugin-r6ncj" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.541827 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e48d276e-2b89-494b-bbf6-8471a336a7a0-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-wpgk7\" (UID: \"e48d276e-2b89-494b-bbf6-8471a336a7a0\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wpgk7" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.541850 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/95d5c817-e302-4f46-9db2-333b21486a7c-console-config\") pod \"console-f9d7485db-cds6s\" (UID: \"95d5c817-e302-4f46-9db2-333b21486a7c\") " pod="openshift-console/console-f9d7485db-cds6s" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.541871 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a3026143-1986-44ad-b5e7-fbae179a6503-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-lspv9\" (UID: \"a3026143-1986-44ad-b5e7-fbae179a6503\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-lspv9" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.541894 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e1bc1adc-a9c6-42a0-a606-ef182242e6ef-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-w48fc\" (UID: \"e1bc1adc-a9c6-42a0-a606-ef182242e6ef\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-w48fc" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.541917 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e690e93c-92e0-4045-88b5-685763786d3a-metrics-tls\") pod \"ingress-operator-5b745b69d9-khg9t\" (UID: \"e690e93c-92e0-4045-88b5-685763786d3a\") " 
pod="openshift-ingress-operator/ingress-operator-5b745b69d9-khg9t" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.541942 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5td6\" (UniqueName: \"kubernetes.io/projected/805371d5-27c1-4895-aab2-23e8e1d9a91b-kube-api-access-r5td6\") pod \"olm-operator-6b444d44fb-7txmt\" (UID: \"805371d5-27c1-4895-aab2-23e8e1d9a91b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7txmt" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.541964 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxznh\" (UniqueName: \"kubernetes.io/projected/c0ecd9f5-ffc9-4a7a-8d6f-96781e7577ac-kube-api-access-hxznh\") pod \"packageserver-d55dfcdfc-ggtd2\" (UID: \"c0ecd9f5-ffc9-4a7a-8d6f-96781e7577ac\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ggtd2" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.541990 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fpkhp\" (UniqueName: \"kubernetes.io/projected/3ed6603d-effa-496b-9ed3-16918cb79b7b-kube-api-access-fpkhp\") pod \"kube-storage-version-migrator-operator-b67b599dd-wdh2w\" (UID: \"3ed6603d-effa-496b-9ed3-16918cb79b7b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wdh2w" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.542017 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q75cx\" (UniqueName: \"kubernetes.io/projected/788d1186-ade1-4f2c-acaa-e0030baa277c-kube-api-access-q75cx\") pod \"multus-admission-controller-857f4d67dd-zs9kz\" (UID: \"788d1186-ade1-4f2c-acaa-e0030baa277c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-zs9kz" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.542037 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/39dbc3d9-e001-4ba3-af2d-e19a2016bbeb-serving-cert\") pod \"controller-manager-879f6c89f-f87xj\" (UID: \"39dbc3d9-e001-4ba3-af2d-e19a2016bbeb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-f87xj" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.542070 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8774a5f7-7c2b-4bee-843c-ae87c78f7dc3-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-5gd9z\" (UID: \"8774a5f7-7c2b-4bee-843c-ae87c78f7dc3\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-5gd9z" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.542095 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/788d1186-ade1-4f2c-acaa-e0030baa277c-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-zs9kz\" (UID: \"788d1186-ade1-4f2c-acaa-e0030baa277c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-zs9kz" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.542120 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kncrq\" (UniqueName: \"kubernetes.io/projected/39dbc3d9-e001-4ba3-af2d-e19a2016bbeb-kube-api-access-kncrq\") pod \"controller-manager-879f6c89f-f87xj\" (UID: \"39dbc3d9-e001-4ba3-af2d-e19a2016bbeb\") " 
pod="openshift-controller-manager/controller-manager-879f6c89f-f87xj" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.542139 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/1d8206ee-7365-4b1d-8108-6c4626ed7bc3-node-bootstrap-token\") pod \"machine-config-server-qjpwp\" (UID: \"1d8206ee-7365-4b1d-8108-6c4626ed7bc3\") " pod="openshift-machine-config-operator/machine-config-server-qjpwp" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.542156 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/90e317c4-1ff5-45ef-8f05-bbe33bda8434-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-pkwvd\" (UID: \"90e317c4-1ff5-45ef-8f05-bbe33bda8434\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pkwvd" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.542177 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/39dbc3d9-e001-4ba3-af2d-e19a2016bbeb-client-ca\") pod \"controller-manager-879f6c89f-f87xj\" (UID: \"39dbc3d9-e001-4ba3-af2d-e19a2016bbeb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-f87xj" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.542196 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dlt8b\" (UniqueName: \"kubernetes.io/projected/1d8206ee-7365-4b1d-8108-6c4626ed7bc3-kube-api-access-dlt8b\") pod \"machine-config-server-qjpwp\" (UID: \"1d8206ee-7365-4b1d-8108-6c4626ed7bc3\") " pod="openshift-machine-config-operator/machine-config-server-qjpwp" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.542219 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f54e180-97ba-4d10-9bda-7d38a5dab306-config\") pod \"service-ca-operator-777779d784-vf9nc\" (UID: \"9f54e180-97ba-4d10-9bda-7d38a5dab306\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-vf9nc" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.542237 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/1d8206ee-7365-4b1d-8108-6c4626ed7bc3-certs\") pod \"machine-config-server-qjpwp\" (UID: \"1d8206ee-7365-4b1d-8108-6c4626ed7bc3\") " pod="openshift-machine-config-operator/machine-config-server-qjpwp" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.542254 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4f401bf-9a39-4f1b-a24f-9f5db5a36e40-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-bjntn\" (UID: \"d4f401bf-9a39-4f1b-a24f-9f5db5a36e40\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bjntn" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.542257 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8774a5f7-7c2b-4bee-843c-ae87c78f7dc3-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-5gd9z\" (UID: \"8774a5f7-7c2b-4bee-843c-ae87c78f7dc3\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-5gd9z" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.542276 4792 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e1bc1adc-a9c6-42a0-a606-ef182242e6ef-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-w48fc\" (UID: \"e1bc1adc-a9c6-42a0-a606-ef182242e6ef\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-w48fc" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.542292 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/2f18da50-4c56-4f61-bcf4-583bec642127-registration-dir\") pod \"csi-hostpathplugin-r6ncj\" (UID: \"2f18da50-4c56-4f61-bcf4-583bec642127\") " pod="hostpath-provisioner/csi-hostpathplugin-r6ncj" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.542310 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/a3026143-1986-44ad-b5e7-fbae179a6503-proxy-tls\") pod \"machine-config-controller-84d6567774-lspv9\" (UID: \"a3026143-1986-44ad-b5e7-fbae179a6503\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-lspv9" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.542335 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/805371d5-27c1-4895-aab2-23e8e1d9a91b-profile-collector-cert\") pod \"olm-operator-6b444d44fb-7txmt\" (UID: \"805371d5-27c1-4895-aab2-23e8e1d9a91b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7txmt" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.542352 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e48d276e-2b89-494b-bbf6-8471a336a7a0-config\") pod \"kube-apiserver-operator-766d6c64bb-wpgk7\" (UID: \"e48d276e-2b89-494b-bbf6-8471a336a7a0\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wpgk7" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.542369 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bk68j\" (UniqueName: \"kubernetes.io/projected/90e317c4-1ff5-45ef-8f05-bbe33bda8434-kube-api-access-bk68j\") pod \"cluster-image-registry-operator-dc59b4c8b-pkwvd\" (UID: \"90e317c4-1ff5-45ef-8f05-bbe33bda8434\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pkwvd" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.542385 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e690e93c-92e0-4045-88b5-685763786d3a-trusted-ca\") pod \"ingress-operator-5b745b69d9-khg9t\" (UID: \"e690e93c-92e0-4045-88b5-685763786d3a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-khg9t" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.543201 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/2f18da50-4c56-4f61-bcf4-583bec642127-socket-dir\") pod \"csi-hostpathplugin-r6ncj\" (UID: \"2f18da50-4c56-4f61-bcf4-583bec642127\") " pod="hostpath-provisioner/csi-hostpathplugin-r6ncj" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.543310 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/2f18da50-4c56-4f61-bcf4-583bec642127-csi-data-dir\") pod 
\"csi-hostpathplugin-r6ncj\" (UID: \"2f18da50-4c56-4f61-bcf4-583bec642127\") " pod="hostpath-provisioner/csi-hostpathplugin-r6ncj" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.543557 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/95d5c817-e302-4f46-9db2-333b21486a7c-oauth-serving-cert\") pod \"console-f9d7485db-cds6s\" (UID: \"95d5c817-e302-4f46-9db2-333b21486a7c\") " pod="openshift-console/console-f9d7485db-cds6s" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.543581 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e690e93c-92e0-4045-88b5-685763786d3a-trusted-ca\") pod \"ingress-operator-5b745b69d9-khg9t\" (UID: \"e690e93c-92e0-4045-88b5-685763786d3a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-khg9t" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.543867 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ab249412-8c3a-4e5f-b84a-374b19cc1dc9-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-nzgzn\" (UID: \"ab249412-8c3a-4e5f-b84a-374b19cc1dc9\") " pod="openshift-marketplace/marketplace-operator-79b997595-nzgzn" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.544000 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/2f18da50-4c56-4f61-bcf4-583bec642127-registration-dir\") pod \"csi-hostpathplugin-r6ncj\" (UID: \"2f18da50-4c56-4f61-bcf4-583bec642127\") " pod="hostpath-provisioner/csi-hostpathplugin-r6ncj" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.544202 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/2f18da50-4c56-4f61-bcf4-583bec642127-mountpoint-dir\") pod \"csi-hostpathplugin-r6ncj\" (UID: \"2f18da50-4c56-4f61-bcf4-583bec642127\") " pod="hostpath-provisioner/csi-hostpathplugin-r6ncj" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.544601 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e48d276e-2b89-494b-bbf6-8471a336a7a0-config\") pod \"kube-apiserver-operator-766d6c64bb-wpgk7\" (UID: \"e48d276e-2b89-494b-bbf6-8471a336a7a0\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wpgk7" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.544970 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/c0ecd9f5-ffc9-4a7a-8d6f-96781e7577ac-tmpfs\") pod \"packageserver-d55dfcdfc-ggtd2\" (UID: \"c0ecd9f5-ffc9-4a7a-8d6f-96781e7577ac\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ggtd2" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.545473 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/95d5c817-e302-4f46-9db2-333b21486a7c-trusted-ca-bundle\") pod \"console-f9d7485db-cds6s\" (UID: \"95d5c817-e302-4f46-9db2-333b21486a7c\") " pod="openshift-console/console-f9d7485db-cds6s" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.546338 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/90e317c4-1ff5-45ef-8f05-bbe33bda8434-trusted-ca\") pod 
\"cluster-image-registry-operator-dc59b4c8b-pkwvd\" (UID: \"90e317c4-1ff5-45ef-8f05-bbe33bda8434\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pkwvd" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.549278 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3ed6603d-effa-496b-9ed3-16918cb79b7b-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-wdh2w\" (UID: \"3ed6603d-effa-496b-9ed3-16918cb79b7b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wdh2w" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.549748 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/95d5c817-e302-4f46-9db2-333b21486a7c-console-config\") pod \"console-f9d7485db-cds6s\" (UID: \"95d5c817-e302-4f46-9db2-333b21486a7c\") " pod="openshift-console/console-f9d7485db-cds6s" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.550608 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e48d276e-2b89-494b-bbf6-8471a336a7a0-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-wpgk7\" (UID: \"e48d276e-2b89-494b-bbf6-8471a336a7a0\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wpgk7" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.551342 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/64e0e779-21ef-489e-8721-54533e24bf31-secret-volume\") pod \"collect-profiles-29411670-9n2pw\" (UID: \"64e0e779-21ef-489e-8721-54533e24bf31\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411670-9n2pw" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.551448 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/95d5c817-e302-4f46-9db2-333b21486a7c-console-serving-cert\") pod \"console-f9d7485db-cds6s\" (UID: \"95d5c817-e302-4f46-9db2-333b21486a7c\") " pod="openshift-console/console-f9d7485db-cds6s" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.551968 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/95d5c817-e302-4f46-9db2-333b21486a7c-service-ca\") pod \"console-f9d7485db-cds6s\" (UID: \"95d5c817-e302-4f46-9db2-333b21486a7c\") " pod="openshift-console/console-f9d7485db-cds6s" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.552268 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/95d5c817-e302-4f46-9db2-333b21486a7c-console-oauth-config\") pod \"console-f9d7485db-cds6s\" (UID: \"95d5c817-e302-4f46-9db2-333b21486a7c\") " pod="openshift-console/console-f9d7485db-cds6s" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.552358 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3ed6603d-effa-496b-9ed3-16918cb79b7b-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-wdh2w\" (UID: \"3ed6603d-effa-496b-9ed3-16918cb79b7b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wdh2w" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.552849 4792 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/ab249412-8c3a-4e5f-b84a-374b19cc1dc9-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-nzgzn\" (UID: \"ab249412-8c3a-4e5f-b84a-374b19cc1dc9\") " pod="openshift-marketplace/marketplace-operator-79b997595-nzgzn" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.552899 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e690e93c-92e0-4045-88b5-685763786d3a-metrics-tls\") pod \"ingress-operator-5b745b69d9-khg9t\" (UID: \"e690e93c-92e0-4045-88b5-685763786d3a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-khg9t" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.553511 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/90e317c4-1ff5-45ef-8f05-bbe33bda8434-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-pkwvd\" (UID: \"90e317c4-1ff5-45ef-8f05-bbe33bda8434\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pkwvd" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.554798 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/805371d5-27c1-4895-aab2-23e8e1d9a91b-profile-collector-cert\") pod \"olm-operator-6b444d44fb-7txmt\" (UID: \"805371d5-27c1-4895-aab2-23e8e1d9a91b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7txmt" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.555027 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.558707 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a3026143-1986-44ad-b5e7-fbae179a6503-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-lspv9\" (UID: \"a3026143-1986-44ad-b5e7-fbae179a6503\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-lspv9" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.563642 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8774a5f7-7c2b-4bee-843c-ae87c78f7dc3-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-5gd9z\" (UID: \"8774a5f7-7c2b-4bee-843c-ae87c78f7dc3\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-5gd9z" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.571039 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.589595 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.595483 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4f401bf-9a39-4f1b-a24f-9f5db5a36e40-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-bjntn\" (UID: \"d4f401bf-9a39-4f1b-a24f-9f5db5a36e40\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bjntn" Dec 02 18:38:24 crc 
kubenswrapper[4792]: I1202 18:38:24.611628 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.620270 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d4f401bf-9a39-4f1b-a24f-9f5db5a36e40-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-bjntn\" (UID: \"d4f401bf-9a39-4f1b-a24f-9f5db5a36e40\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bjntn" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.630661 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.643350 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:38:24 crc kubenswrapper[4792]: E1202 18:38:24.643653 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:25.143612634 +0000 UTC m=+135.916504962 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.644096 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:24 crc kubenswrapper[4792]: E1202 18:38:24.644516 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:25.144500588 +0000 UTC m=+135.917392916 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.653060 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.670456 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.690178 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.709343 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.730414 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.734414 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1bc1adc-a9c6-42a0-a606-ef182242e6ef-config\") pod \"kube-controller-manager-operator-78b949d7b-w48fc\" (UID: \"e1bc1adc-a9c6-42a0-a606-ef182242e6ef\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-w48fc" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.745089 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:38:24 crc kubenswrapper[4792]: E1202 18:38:24.745297 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:25.245270269 +0000 UTC m=+136.018162597 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.745561 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:24 crc kubenswrapper[4792]: E1202 18:38:24.745999 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:25.245979369 +0000 UTC m=+136.018871707 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.749897 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.759721 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e1bc1adc-a9c6-42a0-a606-ef182242e6ef-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-w48fc\" (UID: \"e1bc1adc-a9c6-42a0-a606-ef182242e6ef\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-w48fc" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.769505 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.775890 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/9053d7ed-27f1-470a-8164-6ef32c05ea87-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-phfft\" (UID: \"9053d7ed-27f1-470a-8164-6ef32c05ea87\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-phfft" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.790183 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.810080 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.830513 4792 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.846933 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:38:24 crc kubenswrapper[4792]: E1202 18:38:24.847107 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:25.347080389 +0000 UTC m=+136.119972717 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.847580 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:24 crc kubenswrapper[4792]: E1202 18:38:24.848074 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:25.348049605 +0000 UTC m=+136.120941973 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.850666 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.859564 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/805371d5-27c1-4895-aab2-23e8e1d9a91b-srv-cert\") pod \"olm-operator-6b444d44fb-7txmt\" (UID: \"805371d5-27c1-4895-aab2-23e8e1d9a91b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7txmt" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.870399 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.890058 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.908163 4792 request.go:700] Waited for 1.002525205s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-ingress/secrets?fieldSelector=metadata.name%3Drouter-dockercfg-zdk86&limit=500&resourceVersion=0 Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.910796 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.930379 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.939834 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/05a76a9b-9018-4c8c-a2c7-a83641ca60b7-default-certificate\") pod \"router-default-5444994796-d9jsl\" (UID: \"05a76a9b-9018-4c8c-a2c7-a83641ca60b7\") " pod="openshift-ingress/router-default-5444994796-d9jsl" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.949874 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:38:24 crc kubenswrapper[4792]: E1202 18:38:24.950262 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:25.450226024 +0000 UTC m=+136.223118392 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.950571 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.951116 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 02 18:38:24 crc kubenswrapper[4792]: E1202 18:38:24.951139 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:25.451114218 +0000 UTC m=+136.224006746 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.956421 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/05a76a9b-9018-4c8c-a2c7-a83641ca60b7-stats-auth\") pod \"router-default-5444994796-d9jsl\" (UID: \"05a76a9b-9018-4c8c-a2c7-a83641ca60b7\") " pod="openshift-ingress/router-default-5444994796-d9jsl" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.970976 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.980706 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/05a76a9b-9018-4c8c-a2c7-a83641ca60b7-metrics-certs\") pod \"router-default-5444994796-d9jsl\" (UID: \"05a76a9b-9018-4c8c-a2c7-a83641ca60b7\") " pod="openshift-ingress/router-default-5444994796-d9jsl" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.990723 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 02 18:38:24 crc kubenswrapper[4792]: I1202 18:38:24.992931 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/05a76a9b-9018-4c8c-a2c7-a83641ca60b7-service-ca-bundle\") pod \"router-default-5444994796-d9jsl\" (UID: \"05a76a9b-9018-4c8c-a2c7-a83641ca60b7\") " pod="openshift-ingress/router-default-5444994796-d9jsl" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.011471 4792 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-ingress"/"kube-root-ca.crt" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.031516 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.039720 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/a3026143-1986-44ad-b5e7-fbae179a6503-proxy-tls\") pod \"machine-config-controller-84d6567774-lspv9\" (UID: \"a3026143-1986-44ad-b5e7-fbae179a6503\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-lspv9" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.051664 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.053306 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:38:25 crc kubenswrapper[4792]: E1202 18:38:25.053557 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:25.553486413 +0000 UTC m=+136.326378781 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.054927 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:25 crc kubenswrapper[4792]: E1202 18:38:25.055591 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:25.555567279 +0000 UTC m=+136.328459647 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.071434 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.081458 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/788d1186-ade1-4f2c-acaa-e0030baa277c-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-zs9kz\" (UID: \"788d1186-ade1-4f2c-acaa-e0030baa277c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-zs9kz" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.090556 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.123129 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.125990 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/39dbc3d9-e001-4ba3-af2d-e19a2016bbeb-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-f87xj\" (UID: \"39dbc3d9-e001-4ba3-af2d-e19a2016bbeb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-f87xj" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.131281 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.150881 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.156332 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:38:25 crc kubenswrapper[4792]: E1202 18:38:25.156582 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:25.656516925 +0000 UTC m=+136.429409283 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.157711 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:25 crc kubenswrapper[4792]: E1202 18:38:25.158612 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:25.658589461 +0000 UTC m=+136.431481819 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.161496 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/39dbc3d9-e001-4ba3-af2d-e19a2016bbeb-serving-cert\") pod \"controller-manager-879f6c89f-f87xj\" (UID: \"39dbc3d9-e001-4ba3-af2d-e19a2016bbeb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-f87xj" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.170858 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.191369 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.210569 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.212957 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39dbc3d9-e001-4ba3-af2d-e19a2016bbeb-config\") pod \"controller-manager-879f6c89f-f87xj\" (UID: \"39dbc3d9-e001-4ba3-af2d-e19a2016bbeb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-f87xj" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.230931 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.235952 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/39dbc3d9-e001-4ba3-af2d-e19a2016bbeb-client-ca\") pod \"controller-manager-879f6c89f-f87xj\" (UID: 
\"39dbc3d9-e001-4ba3-af2d-e19a2016bbeb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-f87xj" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.250460 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.259020 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:38:25 crc kubenswrapper[4792]: E1202 18:38:25.259882 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:25.759861986 +0000 UTC m=+136.532754314 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.270891 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.278309 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/ae2be54d-c2f0-427a-bccd-c63bad0a0f0e-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-rfct2\" (UID: \"ae2be54d-c2f0-427a-bccd-c63bad0a0f0e\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rfct2" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.290348 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.297782 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9f54e180-97ba-4d10-9bda-7d38a5dab306-serving-cert\") pod \"service-ca-operator-777779d784-vf9nc\" (UID: \"9f54e180-97ba-4d10-9bda-7d38a5dab306\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-vf9nc" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.309433 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.331298 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.336447 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f54e180-97ba-4d10-9bda-7d38a5dab306-config\") pod \"service-ca-operator-777779d784-vf9nc\" (UID: \"9f54e180-97ba-4d10-9bda-7d38a5dab306\") " 
pod="openshift-service-ca-operator/service-ca-operator-777779d784-vf9nc" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.351646 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.362048 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:25 crc kubenswrapper[4792]: E1202 18:38:25.362584 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:25.862562019 +0000 UTC m=+136.635454367 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.370656 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.374331 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c0ecd9f5-ffc9-4a7a-8d6f-96781e7577ac-webhook-cert\") pod \"packageserver-d55dfcdfc-ggtd2\" (UID: \"c0ecd9f5-ffc9-4a7a-8d6f-96781e7577ac\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ggtd2" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.378951 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c0ecd9f5-ffc9-4a7a-8d6f-96781e7577ac-apiservice-cert\") pod \"packageserver-d55dfcdfc-ggtd2\" (UID: \"c0ecd9f5-ffc9-4a7a-8d6f-96781e7577ac\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ggtd2" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.389828 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.411017 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.430121 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.437942 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/649496fd-1b8f-429a-be02-160ecc5b2ab9-signing-key\") pod \"service-ca-9c57cc56f-72k72\" (UID: \"649496fd-1b8f-429a-be02-160ecc5b2ab9\") " pod="openshift-service-ca/service-ca-9c57cc56f-72k72" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.450555 4792 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.462919 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:38:25 crc kubenswrapper[4792]: E1202 18:38:25.463028 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:25.963007612 +0000 UTC m=+136.735899940 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.463550 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:25 crc kubenswrapper[4792]: E1202 18:38:25.463866 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:25.963852705 +0000 UTC m=+136.736745033 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.469661 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.476227 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/649496fd-1b8f-429a-be02-160ecc5b2ab9-signing-cabundle\") pod \"service-ca-9c57cc56f-72k72\" (UID: \"649496fd-1b8f-429a-be02-160ecc5b2ab9\") " pod="openshift-service-ca/service-ca-9c57cc56f-72k72" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.490711 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.512934 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.522457 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/64e0e779-21ef-489e-8721-54533e24bf31-config-volume\") pod \"collect-profiles-29411670-9n2pw\" (UID: \"64e0e779-21ef-489e-8721-54533e24bf31\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411670-9n2pw" Dec 02 18:38:25 crc kubenswrapper[4792]: E1202 18:38:25.544035 4792 secret.go:188] Couldn't get secret openshift-ingress-canary/canary-serving-cert: failed to sync secret cache: timed out waiting for the condition Dec 02 18:38:25 crc kubenswrapper[4792]: E1202 18:38:25.544260 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/93d19b8a-404d-4f49-8789-ad474970771a-cert podName:93d19b8a-404d-4f49-8789-ad474970771a nodeName:}" failed. No retries permitted until 2025-12-02 18:38:26.044216987 +0000 UTC m=+136.817109345 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/93d19b8a-404d-4f49-8789-ad474970771a-cert") pod "ingress-canary-tk4v8" (UID: "93d19b8a-404d-4f49-8789-ad474970771a") : failed to sync secret cache: timed out waiting for the condition Dec 02 18:38:25 crc kubenswrapper[4792]: E1202 18:38:25.544963 4792 secret.go:188] Couldn't get secret openshift-machine-config-operator/node-bootstrapper-token: failed to sync secret cache: timed out waiting for the condition Dec 02 18:38:25 crc kubenswrapper[4792]: E1202 18:38:25.545067 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1d8206ee-7365-4b1d-8108-6c4626ed7bc3-node-bootstrap-token podName:1d8206ee-7365-4b1d-8108-6c4626ed7bc3 nodeName:}" failed. No retries permitted until 2025-12-02 18:38:26.045025999 +0000 UTC m=+136.817918357 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "node-bootstrap-token" (UniqueName: "kubernetes.io/secret/1d8206ee-7365-4b1d-8108-6c4626ed7bc3-node-bootstrap-token") pod "machine-config-server-qjpwp" (UID: "1d8206ee-7365-4b1d-8108-6c4626ed7bc3") : failed to sync secret cache: timed out waiting for the condition Dec 02 18:38:25 crc kubenswrapper[4792]: E1202 18:38:25.545172 4792 configmap.go:193] Couldn't get configMap openshift-dns/dns-default: failed to sync configmap cache: timed out waiting for the condition Dec 02 18:38:25 crc kubenswrapper[4792]: E1202 18:38:25.545251 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/67b8fcaa-f575-4b23-8dbe-9c2402abba3d-config-volume podName:67b8fcaa-f575-4b23-8dbe-9c2402abba3d nodeName:}" failed. No retries permitted until 2025-12-02 18:38:26.045228234 +0000 UTC m=+136.818120592 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-volume" (UniqueName: "kubernetes.io/configmap/67b8fcaa-f575-4b23-8dbe-9c2402abba3d-config-volume") pod "dns-default-z9h9q" (UID: "67b8fcaa-f575-4b23-8dbe-9c2402abba3d") : failed to sync configmap cache: timed out waiting for the condition Dec 02 18:38:25 crc kubenswrapper[4792]: E1202 18:38:25.545250 4792 secret.go:188] Couldn't get secret openshift-dns/dns-default-metrics-tls: failed to sync secret cache: timed out waiting for the condition Dec 02 18:38:25 crc kubenswrapper[4792]: E1202 18:38:25.545358 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/67b8fcaa-f575-4b23-8dbe-9c2402abba3d-metrics-tls podName:67b8fcaa-f575-4b23-8dbe-9c2402abba3d nodeName:}" failed. No retries permitted until 2025-12-02 18:38:26.045332907 +0000 UTC m=+136.818225235 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-tls" (UniqueName: "kubernetes.io/secret/67b8fcaa-f575-4b23-8dbe-9c2402abba3d-metrics-tls") pod "dns-default-z9h9q" (UID: "67b8fcaa-f575-4b23-8dbe-9c2402abba3d") : failed to sync secret cache: timed out waiting for the condition Dec 02 18:38:25 crc kubenswrapper[4792]: E1202 18:38:25.545394 4792 secret.go:188] Couldn't get secret openshift-machine-config-operator/machine-config-server-tls: failed to sync secret cache: timed out waiting for the condition Dec 02 18:38:25 crc kubenswrapper[4792]: E1202 18:38:25.545427 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1d8206ee-7365-4b1d-8108-6c4626ed7bc3-certs podName:1d8206ee-7365-4b1d-8108-6c4626ed7bc3 nodeName:}" failed. No retries permitted until 2025-12-02 18:38:26.04542016 +0000 UTC m=+136.818312488 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "certs" (UniqueName: "kubernetes.io/secret/1d8206ee-7365-4b1d-8108-6c4626ed7bc3-certs") pod "machine-config-server-qjpwp" (UID: "1d8206ee-7365-4b1d-8108-6c4626ed7bc3") : failed to sync secret cache: timed out waiting for the condition Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.547795 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqhdw\" (UniqueName: \"kubernetes.io/projected/8b019f11-935f-4956-aaf0-c6d2a5d66356-kube-api-access-fqhdw\") pod \"route-controller-manager-6576b87f9c-fjkdx\" (UID: \"8b019f11-935f-4956-aaf0-c6d2a5d66356\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fjkdx" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.564349 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:38:25 crc kubenswrapper[4792]: E1202 18:38:25.564574 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:26.064504893 +0000 UTC m=+136.837397231 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.565283 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:25 crc kubenswrapper[4792]: E1202 18:38:25.565804 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:26.065792418 +0000 UTC m=+136.838684756 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.567179 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zhfks\" (UniqueName: \"kubernetes.io/projected/139247ce-bdc4-46c8-8acb-0120504e8855-kube-api-access-zhfks\") pod \"apiserver-7bbb656c7d-j7k6j\" (UID: \"139247ce-bdc4-46c8-8acb-0120504e8855\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j7k6j" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.585986 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vmzhh\" (UniqueName: \"kubernetes.io/projected/f14e7b6b-9dda-45d2-a3d2-097c3432f736-kube-api-access-vmzhh\") pod \"console-operator-58897d9998-wzsvf\" (UID: \"f14e7b6b-9dda-45d2-a3d2-097c3432f736\") " pod="openshift-console-operator/console-operator-58897d9998-wzsvf" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.612137 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tsj56\" (UniqueName: \"kubernetes.io/projected/fb8e1e39-1ba7-4d9a-b6ad-e1ba38d6abf1-kube-api-access-tsj56\") pod \"machine-api-operator-5694c8668f-nnst7\" (UID: \"fb8e1e39-1ba7-4d9a-b6ad-e1ba38d6abf1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nnst7" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.623811 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vxlkf\" (UniqueName: \"kubernetes.io/projected/ce83fd31-f9fe-46f9-b865-cdfcfcf8e6be-kube-api-access-vxlkf\") pod \"catalog-operator-68c6474976-hcshz\" (UID: \"ce83fd31-f9fe-46f9-b865-cdfcfcf8e6be\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-hcshz" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.645593 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8wtgr\" (UniqueName: \"kubernetes.io/projected/29fa6885-6526-4cdc-aace-fe3194157043-kube-api-access-8wtgr\") pod \"etcd-operator-b45778765-wn9mc\" (UID: \"29fa6885-6526-4cdc-aace-fe3194157043\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wn9mc" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.663209 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k4hlp\" (UniqueName: \"kubernetes.io/projected/08f1469c-c4e8-4dab-a21e-730dc60f8ff2-kube-api-access-k4hlp\") pod \"cluster-samples-operator-665b6dd947-wmll4\" (UID: \"08f1469c-c4e8-4dab-a21e-730dc60f8ff2\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-wmll4" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.666724 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:38:25 crc kubenswrapper[4792]: E1202 18:38:25.666913 4792 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:26.166888448 +0000 UTC m=+136.939780776 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.667294 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:25 crc kubenswrapper[4792]: E1202 18:38:25.667736 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:26.167726701 +0000 UTC m=+136.940619029 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.672510 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-wmll4" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.673186 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-nnst7" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.685338 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dhd5x\" (UniqueName: \"kubernetes.io/projected/59c54684-8c51-4e7b-975e-2511cf858db6-kube-api-access-dhd5x\") pod \"openshift-apiserver-operator-796bbdcf4f-wnkgz\" (UID: \"59c54684-8c51-4e7b-975e-2511cf858db6\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wnkgz" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.687817 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-wzsvf" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.702268 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fjkdx" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.707484 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v5ccs\" (UniqueName: \"kubernetes.io/projected/881670bf-7c7d-41c1-aedc-b6d9ba17b368-kube-api-access-v5ccs\") pod \"machine-config-operator-74547568cd-jtl55\" (UID: \"881670bf-7c7d-41c1-aedc-b6d9ba17b368\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-jtl55" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.711313 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-hcshz" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.721684 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j7k6j" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.728603 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s424v\" (UniqueName: \"kubernetes.io/projected/41739ae1-9772-4d87-8efd-6eeec54dadab-kube-api-access-s424v\") pod \"machine-approver-56656f9798-wwvbm\" (UID: \"41739ae1-9772-4d87-8efd-6eeec54dadab\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wwvbm" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.746155 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-thhn7\" (UniqueName: \"kubernetes.io/projected/794efbdc-c6bb-45a1-ab12-65a4ec7dea6c-kube-api-access-thhn7\") pod \"authentication-operator-69f744f599-szz8b\" (UID: \"794efbdc-c6bb-45a1-ab12-65a4ec7dea6c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-szz8b" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.768948 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:38:25 crc kubenswrapper[4792]: E1202 18:38:25.769179 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:26.26914532 +0000 UTC m=+137.042037648 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.769384 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:25 crc kubenswrapper[4792]: E1202 18:38:25.770476 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:26.270464825 +0000 UTC m=+137.043357153 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.783133 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cg555\" (UniqueName: \"kubernetes.io/projected/04246b55-2809-4c64-abaf-9bed254d0e80-kube-api-access-cg555\") pod \"oauth-openshift-558db77b4-hgblz\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " pod="openshift-authentication/oauth-openshift-558db77b4-hgblz" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.792924 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.793018 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-jtl55" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.794666 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4qvtq\" (UniqueName: \"kubernetes.io/projected/62fcc2b1-4061-42e1-a3f9-15fd336c5e38-kube-api-access-4qvtq\") pod \"apiserver-76f77b778f-wh7rt\" (UID: \"62fcc2b1-4061-42e1-a3f9-15fd336c5e38\") " pod="openshift-apiserver/apiserver-76f77b778f-wh7rt" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.801955 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-wn9mc" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.811774 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.831638 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.850891 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.851004 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-wh7rt" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.870549 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.870607 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:38:25 crc kubenswrapper[4792]: E1202 18:38:25.870969 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:26.370948729 +0000 UTC m=+137.143841047 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.871264 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:25 crc kubenswrapper[4792]: E1202 18:38:25.871721 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:26.371705259 +0000 UTC m=+137.144597587 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.874591 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wwvbm" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.895125 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.901743 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wnkgz" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.908203 4792 request.go:700] Waited for 1.92026261s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-ingress-canary/secrets?fieldSelector=metadata.name%3Ddefault-dockercfg-2llfx&limit=500&resourceVersion=0 Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.912363 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.922736 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-szz8b" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.938300 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.950114 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.972143 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.973343 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:38:25 crc kubenswrapper[4792]: E1202 18:38:25.973759 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:26.473729945 +0000 UTC m=+137.246622283 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.974286 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:25 crc kubenswrapper[4792]: E1202 18:38:25.974716 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:26.474706111 +0000 UTC m=+137.247598439 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.991280 4792 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 02 18:38:25 crc kubenswrapper[4792]: I1202 18:38:25.996542 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-hgblz" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.004901 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-nnst7"] Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.008683 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-wmll4"] Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.010941 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.030135 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.075960 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:38:26 crc kubenswrapper[4792]: E1202 18:38:26.076170 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-02 18:38:26.57614168 +0000 UTC m=+137.349034008 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.076281 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/1d8206ee-7365-4b1d-8108-6c4626ed7bc3-node-bootstrap-token\") pod \"machine-config-server-qjpwp\" (UID: \"1d8206ee-7365-4b1d-8108-6c4626ed7bc3\") " pod="openshift-machine-config-operator/machine-config-server-qjpwp" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.076335 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/1d8206ee-7365-4b1d-8108-6c4626ed7bc3-certs\") pod \"machine-config-server-qjpwp\" (UID: \"1d8206ee-7365-4b1d-8108-6c4626ed7bc3\") " pod="openshift-machine-config-operator/machine-config-server-qjpwp" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.076399 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.076554 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/67b8fcaa-f575-4b23-8dbe-9c2402abba3d-config-volume\") pod \"dns-default-z9h9q\" (UID: \"67b8fcaa-f575-4b23-8dbe-9c2402abba3d\") " pod="openshift-dns/dns-default-z9h9q" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.076617 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/67b8fcaa-f575-4b23-8dbe-9c2402abba3d-metrics-tls\") pod \"dns-default-z9h9q\" (UID: \"67b8fcaa-f575-4b23-8dbe-9c2402abba3d\") " pod="openshift-dns/dns-default-z9h9q" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.076726 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/93d19b8a-404d-4f49-8789-ad474970771a-cert\") pod \"ingress-canary-tk4v8\" (UID: \"93d19b8a-404d-4f49-8789-ad474970771a\") " pod="openshift-ingress-canary/ingress-canary-tk4v8" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.082202 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/67b8fcaa-f575-4b23-8dbe-9c2402abba3d-config-volume\") pod \"dns-default-z9h9q\" (UID: \"67b8fcaa-f575-4b23-8dbe-9c2402abba3d\") " pod="openshift-dns/dns-default-z9h9q" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.083083 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/93d19b8a-404d-4f49-8789-ad474970771a-cert\") pod \"ingress-canary-tk4v8\" (UID: 
\"93d19b8a-404d-4f49-8789-ad474970771a\") " pod="openshift-ingress-canary/ingress-canary-tk4v8" Dec 02 18:38:26 crc kubenswrapper[4792]: E1202 18:38:26.084279 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:26.584259509 +0000 UTC m=+137.357151837 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.090313 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/1d8206ee-7365-4b1d-8108-6c4626ed7bc3-certs\") pod \"machine-config-server-qjpwp\" (UID: \"1d8206ee-7365-4b1d-8108-6c4626ed7bc3\") " pod="openshift-machine-config-operator/machine-config-server-qjpwp" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.094390 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/1d8206ee-7365-4b1d-8108-6c4626ed7bc3-node-bootstrap-token\") pod \"machine-config-server-qjpwp\" (UID: \"1d8206ee-7365-4b1d-8108-6c4626ed7bc3\") " pod="openshift-machine-config-operator/machine-config-server-qjpwp" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.097099 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/67b8fcaa-f575-4b23-8dbe-9c2402abba3d-metrics-tls\") pod \"dns-default-z9h9q\" (UID: \"67b8fcaa-f575-4b23-8dbe-9c2402abba3d\") " pod="openshift-dns/dns-default-z9h9q" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.100261 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-j7k6j"] Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.108362 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c586142b-c192-4706-9026-bcf666e8f7c6-bound-sa-token\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.125574 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cpwrv\" (UniqueName: \"kubernetes.io/projected/c586142b-c192-4706-9026-bcf666e8f7c6-kube-api-access-cpwrv\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.131150 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2mb46\" (UniqueName: \"kubernetes.io/projected/ff2b39c3-c4a3-4d87-b4bd-474d89c04db7-kube-api-access-2mb46\") pod \"dns-operator-744455d44c-f89mv\" (UID: \"ff2b39c3-c4a3-4d87-b4bd-474d89c04db7\") " pod="openshift-dns-operator/dns-operator-744455d44c-f89mv" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.154083 4792 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gd9cd\" (UniqueName: \"kubernetes.io/projected/5fede595-7ee5-4aaf-999d-45eebf4ca097-kube-api-access-gd9cd\") pod \"openshift-config-operator-7777fb866f-nplg6\" (UID: \"5fede595-7ee5-4aaf-999d-45eebf4ca097\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-nplg6" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.179218 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:38:26 crc kubenswrapper[4792]: E1202 18:38:26.179920 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:26.679896322 +0000 UTC m=+137.452788650 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.185110 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-wzsvf"] Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.194315 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e48d276e-2b89-494b-bbf6-8471a336a7a0-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-wpgk7\" (UID: \"e48d276e-2b89-494b-bbf6-8471a336a7a0\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wpgk7" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.213945 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-wn9mc"] Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.220607 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gqjb9\" (UniqueName: \"kubernetes.io/projected/67b8fcaa-f575-4b23-8dbe-9c2402abba3d-kube-api-access-gqjb9\") pod \"dns-default-z9h9q\" (UID: \"67b8fcaa-f575-4b23-8dbe-9c2402abba3d\") " pod="openshift-dns/dns-default-z9h9q" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.232316 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5bhzl\" (UniqueName: \"kubernetes.io/projected/a3026143-1986-44ad-b5e7-fbae179a6503-kube-api-access-5bhzl\") pod \"machine-config-controller-84d6567774-lspv9\" (UID: \"a3026143-1986-44ad-b5e7-fbae179a6503\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-lspv9" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.235951 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-jtl55"] Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.239366 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-lspv9" Dec 02 18:38:26 crc kubenswrapper[4792]: W1202 18:38:26.241752 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod881670bf_7c7d_41c1_aedc_b6d9ba17b368.slice/crio-6d356d3d4a4a26af4700859e62075f11dde7f04bdaf9ee316e90b11a42a19353 WatchSource:0}: Error finding container 6d356d3d4a4a26af4700859e62075f11dde7f04bdaf9ee316e90b11a42a19353: Status 404 returned error can't find the container with id 6d356d3d4a4a26af4700859e62075f11dde7f04bdaf9ee316e90b11a42a19353 Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.256322 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44m86\" (UniqueName: \"kubernetes.io/projected/9053d7ed-27f1-470a-8164-6ef32c05ea87-kube-api-access-44m86\") pod \"control-plane-machine-set-operator-78cbb6b69f-phfft\" (UID: \"9053d7ed-27f1-470a-8164-6ef32c05ea87\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-phfft" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.277963 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8x69z\" (UniqueName: \"kubernetes.io/projected/9f54e180-97ba-4d10-9bda-7d38a5dab306-kube-api-access-8x69z\") pod \"service-ca-operator-777779d784-vf9nc\" (UID: \"9f54e180-97ba-4d10-9bda-7d38a5dab306\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-vf9nc" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.278461 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-hcshz"] Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.280386 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-fjkdx"] Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.281280 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:26 crc kubenswrapper[4792]: E1202 18:38:26.281841 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:26.781826925 +0000 UTC m=+137.554719243 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.329421 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-z9h9q" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.332640 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wnkgz"] Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.337113 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kncrq\" (UniqueName: \"kubernetes.io/projected/39dbc3d9-e001-4ba3-af2d-e19a2016bbeb-kube-api-access-kncrq\") pod \"controller-manager-879f6c89f-f87xj\" (UID: \"39dbc3d9-e001-4ba3-af2d-e19a2016bbeb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-f87xj" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.337953 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-wh7rt"] Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.338758 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8774a5f7-7c2b-4bee-843c-ae87c78f7dc3-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-5gd9z\" (UID: \"8774a5f7-7c2b-4bee-843c-ae87c78f7dc3\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-5gd9z" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.350642 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-wzsvf" event={"ID":"f14e7b6b-9dda-45d2-a3d2-097c3432f736","Type":"ContainerStarted","Data":"b2defc172880e865a384f429d10331f76b25c52ed336bfe9fa3e078a6c428ad0"} Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.353410 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mjclj\" (UniqueName: \"kubernetes.io/projected/64e0e779-21ef-489e-8721-54533e24bf31-kube-api-access-mjclj\") pod \"collect-profiles-29411670-9n2pw\" (UID: \"64e0e779-21ef-489e-8721-54533e24bf31\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411670-9n2pw" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.354413 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j7k6j" event={"ID":"139247ce-bdc4-46c8-8acb-0120504e8855","Type":"ContainerStarted","Data":"6ebe81de1730d242404d4cfc0a45182bae0a32c7dbd6248d77f6d2b2ff3a651c"} Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.356094 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-jtl55" event={"ID":"881670bf-7c7d-41c1-aedc-b6d9ba17b368","Type":"ContainerStarted","Data":"6d356d3d4a4a26af4700859e62075f11dde7f04bdaf9ee316e90b11a42a19353"} Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.357745 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rhcht\" (UniqueName: \"kubernetes.io/projected/cff41058-c666-4dc1-a119-54f157de50b9-kube-api-access-rhcht\") pod \"downloads-7954f5f757-jcpdc\" (UID: \"cff41058-c666-4dc1-a119-54f157de50b9\") " pod="openshift-console/downloads-7954f5f757-jcpdc" Dec 02 18:38:26 crc kubenswrapper[4792]: W1202 18:38:26.361176 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod62fcc2b1_4061_42e1_a3f9_15fd336c5e38.slice/crio-5e134da5cb99922cbc83f77350ec7348e42d9a1311c342ccf468ceef082d0d9c WatchSource:0}: Error finding container 
5e134da5cb99922cbc83f77350ec7348e42d9a1311c342ccf468ceef082d0d9c: Status 404 returned error can't find the container with id 5e134da5cb99922cbc83f77350ec7348e42d9a1311c342ccf468ceef082d0d9c Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.361307 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-67v7d\" (UniqueName: \"kubernetes.io/projected/649496fd-1b8f-429a-be02-160ecc5b2ab9-kube-api-access-67v7d\") pod \"service-ca-9c57cc56f-72k72\" (UID: \"649496fd-1b8f-429a-be02-160ecc5b2ab9\") " pod="openshift-service-ca/service-ca-9c57cc56f-72k72" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.361474 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-nnst7" event={"ID":"fb8e1e39-1ba7-4d9a-b6ad-e1ba38d6abf1","Type":"ContainerStarted","Data":"3a8123a53e4ed84ad12254a74b54e0851944408489c41531ae946a295834d6ae"} Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.368566 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wwvbm" event={"ID":"41739ae1-9772-4d87-8efd-6eeec54dadab","Type":"ContainerStarted","Data":"c3baa6248d91ee492bc1c9225e89ee49b9f362e9f795ff2397a0f58d52ed2d40"} Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.373845 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e1bc1adc-a9c6-42a0-a606-ef182242e6ef-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-w48fc\" (UID: \"e1bc1adc-a9c6-42a0-a606-ef182242e6ef\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-w48fc" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.383500 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:38:26 crc kubenswrapper[4792]: E1202 18:38:26.383647 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:26.883622824 +0000 UTC m=+137.656515152 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.383875 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:26 crc kubenswrapper[4792]: E1202 18:38:26.384329 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:26.884317933 +0000 UTC m=+137.657210321 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.385753 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-f89mv" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.387931 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qd2gp\" (UniqueName: \"kubernetes.io/projected/455642ee-8453-433f-a998-ea30f5baadef-kube-api-access-qd2gp\") pod \"migrator-59844c95c7-r4dvq\" (UID: \"455642ee-8453-433f-a998-ea30f5baadef\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-r4dvq" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.405086 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dlt8b\" (UniqueName: \"kubernetes.io/projected/1d8206ee-7365-4b1d-8108-6c4626ed7bc3-kube-api-access-dlt8b\") pod \"machine-config-server-qjpwp\" (UID: \"1d8206ee-7365-4b1d-8108-6c4626ed7bc3\") " pod="openshift-machine-config-operator/machine-config-server-qjpwp" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.413009 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-nplg6" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.416841 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wpgk7" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.430791 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2h4lm\" (UniqueName: \"kubernetes.io/projected/2f18da50-4c56-4f61-bcf4-583bec642127-kube-api-access-2h4lm\") pod \"csi-hostpathplugin-r6ncj\" (UID: \"2f18da50-4c56-4f61-bcf4-583bec642127\") " pod="hostpath-provisioner/csi-hostpathplugin-r6ncj" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.453179 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-5gd9z" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.464966 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9txdc\" (UniqueName: \"kubernetes.io/projected/05a76a9b-9018-4c8c-a2c7-a83641ca60b7-kube-api-access-9txdc\") pod \"router-default-5444994796-d9jsl\" (UID: \"05a76a9b-9018-4c8c-a2c7-a83641ca60b7\") " pod="openshift-ingress/router-default-5444994796-d9jsl" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.466657 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-jcpdc" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.473096 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bk68j\" (UniqueName: \"kubernetes.io/projected/90e317c4-1ff5-45ef-8f05-bbe33bda8434-kube-api-access-bk68j\") pod \"cluster-image-registry-operator-dc59b4c8b-pkwvd\" (UID: \"90e317c4-1ff5-45ef-8f05-bbe33bda8434\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pkwvd" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.482384 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-w48fc" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.485230 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:38:26 crc kubenswrapper[4792]: E1202 18:38:26.485372 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:26.985345951 +0000 UTC m=+137.758238279 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.485623 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:26 crc kubenswrapper[4792]: E1202 18:38:26.486183 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:26.986164633 +0000 UTC m=+137.759056971 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.489065 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-phfft" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.495652 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xjxq7\" (UniqueName: \"kubernetes.io/projected/95d5c817-e302-4f46-9db2-333b21486a7c-kube-api-access-xjxq7\") pod \"console-f9d7485db-cds6s\" (UID: \"95d5c817-e302-4f46-9db2-333b21486a7c\") " pod="openshift-console/console-f9d7485db-cds6s" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.497400 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-r4dvq" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.505830 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e690e93c-92e0-4045-88b5-685763786d3a-bound-sa-token\") pod \"ingress-operator-5b745b69d9-khg9t\" (UID: \"e690e93c-92e0-4045-88b5-685763786d3a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-khg9t" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.526073 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-d9jsl" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.533447 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmwjw\" (UniqueName: \"kubernetes.io/projected/ae2be54d-c2f0-427a-bccd-c63bad0a0f0e-kube-api-access-nmwjw\") pod \"package-server-manager-789f6589d5-rfct2\" (UID: \"ae2be54d-c2f0-427a-bccd-c63bad0a0f0e\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rfct2" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.553242 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9cprg\" (UniqueName: \"kubernetes.io/projected/e690e93c-92e0-4045-88b5-685763786d3a-kube-api-access-9cprg\") pod \"ingress-operator-5b745b69d9-khg9t\" (UID: \"e690e93c-92e0-4045-88b5-685763786d3a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-khg9t" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.561187 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-f87xj" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.563209 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-vf9nc" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.575900 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rfct2" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.576211 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-szz8b"] Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.586217 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-72k72" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.586398 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:38:26 crc kubenswrapper[4792]: E1202 18:38:26.587249 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:27.087197512 +0000 UTC m=+137.860089840 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.587475 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:26 crc kubenswrapper[4792]: E1202 18:38:26.587869 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:27.08785598 +0000 UTC m=+137.860748308 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.590493 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-hgblz"] Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.592912 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411670-9n2pw" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.593012 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/90e317c4-1ff5-45ef-8f05-bbe33bda8434-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-pkwvd\" (UID: \"90e317c4-1ff5-45ef-8f05-bbe33bda8434\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pkwvd" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.600939 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hxznh\" (UniqueName: \"kubernetes.io/projected/c0ecd9f5-ffc9-4a7a-8d6f-96781e7577ac-kube-api-access-hxznh\") pod \"packageserver-d55dfcdfc-ggtd2\" (UID: \"c0ecd9f5-ffc9-4a7a-8d6f-96781e7577ac\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ggtd2" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.604551 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-z9h9q"] Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.616072 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ft28\" (UniqueName: \"kubernetes.io/projected/d4f401bf-9a39-4f1b-a24f-9f5db5a36e40-kube-api-access-5ft28\") pod \"openshift-controller-manager-operator-756b6f6bc6-bjntn\" (UID: \"d4f401bf-9a39-4f1b-a24f-9f5db5a36e40\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bjntn" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.625683 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-qjpwp" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.626630 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tv9ls\" (UniqueName: \"kubernetes.io/projected/ab249412-8c3a-4e5f-b84a-374b19cc1dc9-kube-api-access-tv9ls\") pod \"marketplace-operator-79b997595-nzgzn\" (UID: \"ab249412-8c3a-4e5f-b84a-374b19cc1dc9\") " pod="openshift-marketplace/marketplace-operator-79b997595-nzgzn" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.651634 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-r6ncj" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.662711 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-lspv9"] Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.668262 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q88jv\" (UniqueName: \"kubernetes.io/projected/93d19b8a-404d-4f49-8789-ad474970771a-kube-api-access-q88jv\") pod \"ingress-canary-tk4v8\" (UID: \"93d19b8a-404d-4f49-8789-ad474970771a\") " pod="openshift-ingress-canary/ingress-canary-tk4v8" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.669023 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fpkhp\" (UniqueName: \"kubernetes.io/projected/3ed6603d-effa-496b-9ed3-16918cb79b7b-kube-api-access-fpkhp\") pod \"kube-storage-version-migrator-operator-b67b599dd-wdh2w\" (UID: \"3ed6603d-effa-496b-9ed3-16918cb79b7b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wdh2w" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.689843 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:38:26 crc kubenswrapper[4792]: E1202 18:38:26.690425 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:27.190403179 +0000 UTC m=+137.963295507 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.694423 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5td6\" (UniqueName: \"kubernetes.io/projected/805371d5-27c1-4895-aab2-23e8e1d9a91b-kube-api-access-r5td6\") pod \"olm-operator-6b444d44fb-7txmt\" (UID: \"805371d5-27c1-4895-aab2-23e8e1d9a91b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7txmt" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.707154 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q75cx\" (UniqueName: \"kubernetes.io/projected/788d1186-ade1-4f2c-acaa-e0030baa277c-kube-api-access-q75cx\") pod \"multus-admission-controller-857f4d67dd-zs9kz\" (UID: \"788d1186-ade1-4f2c-acaa-e0030baa277c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-zs9kz" Dec 02 18:38:26 crc kubenswrapper[4792]: W1202 18:38:26.717763 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod67b8fcaa_f575_4b23_8dbe_9c2402abba3d.slice/crio-4c654d991d4d0847a876d3f22943196cbda60d4fdb396e0b6221e6a9e6b1b299 WatchSource:0}: Error finding container 4c654d991d4d0847a876d3f22943196cbda60d4fdb396e0b6221e6a9e6b1b299: Status 404 returned error can't find the container with id 4c654d991d4d0847a876d3f22943196cbda60d4fdb396e0b6221e6a9e6b1b299 Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.723029 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pkwvd" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.730974 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wdh2w" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.738080 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-khg9t" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.745924 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-nzgzn" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.759180 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-cds6s" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.772543 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bjntn" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.792515 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:26 crc kubenswrapper[4792]: E1202 18:38:26.793072 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:27.293055121 +0000 UTC m=+138.065947449 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.810924 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7txmt" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.851373 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-zs9kz" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.876585 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ggtd2" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.894031 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:38:26 crc kubenswrapper[4792]: E1202 18:38:26.894250 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:27.394219703 +0000 UTC m=+138.167112031 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.894313 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:26 crc kubenswrapper[4792]: E1202 18:38:26.895086 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:27.395079296 +0000 UTC m=+138.167971624 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.917738 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-r4dvq"] Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.918177 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-tk4v8" Dec 02 18:38:26 crc kubenswrapper[4792]: I1202 18:38:26.995641 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:38:26 crc kubenswrapper[4792]: E1202 18:38:26.996077 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:27.496056444 +0000 UTC m=+138.268948772 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.080570 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-jcpdc"] Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.081089 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-5gd9z"] Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.089552 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-f89mv"] Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.098439 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:27 crc kubenswrapper[4792]: E1202 18:38:27.098909 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:27.598888261 +0000 UTC m=+138.371780589 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.114104 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-nplg6"] Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.200646 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:38:27 crc kubenswrapper[4792]: E1202 18:38:27.201120 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:27.701102291 +0000 UTC m=+138.473994619 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:27 crc kubenswrapper[4792]: W1202 18:38:27.227284 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podff2b39c3_c4a3_4d87_b4bd_474d89c04db7.slice/crio-dcbeb5671ec4c1915e9fd9587a2319e4ce3e11e4869f95102a6e3f81d480f41f WatchSource:0}: Error finding container dcbeb5671ec4c1915e9fd9587a2319e4ce3e11e4869f95102a6e3f81d480f41f: Status 404 returned error can't find the container with id dcbeb5671ec4c1915e9fd9587a2319e4ce3e11e4869f95102a6e3f81d480f41f Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.260649 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-w48fc"] Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.301760 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:27 crc kubenswrapper[4792]: E1202 18:38:27.302108 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:27.802097389 +0000 UTC m=+138.574989717 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.366217 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wpgk7"] Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.403878 4792 generic.go:334] "Generic (PLEG): container finished" podID="139247ce-bdc4-46c8-8acb-0120504e8855" containerID="97800c74dfdbaf6d4c530b84cb1e317e438c1c934eef9818173ff258c565a0eb" exitCode=0 Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.403976 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j7k6j" event={"ID":"139247ce-bdc4-46c8-8acb-0120504e8855","Type":"ContainerDied","Data":"97800c74dfdbaf6d4c530b84cb1e317e438c1c934eef9818173ff258c565a0eb"} Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.404169 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:38:27 crc kubenswrapper[4792]: E1202 18:38:27.404573 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:27.904504664 +0000 UTC m=+138.677396992 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.406066 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.406127 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-hgblz" event={"ID":"04246b55-2809-4c64-abaf-9bed254d0e80","Type":"ContainerStarted","Data":"0ce3005a6024188ace08dceaad20e852456465456d985e4cbec333f57937db8e"} Dec 02 18:38:27 crc kubenswrapper[4792]: E1202 18:38:27.406186 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-02 18:38:27.906172199 +0000 UTC m=+138.679064527 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.410025 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fjkdx" event={"ID":"8b019f11-935f-4956-aaf0-c6d2a5d66356","Type":"ContainerStarted","Data":"e74eb100c940267ab18be4a9ff5e604cb9379b0b07c40dbc9728a2bde6eb4664"} Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.410071 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fjkdx" event={"ID":"8b019f11-935f-4956-aaf0-c6d2a5d66356","Type":"ContainerStarted","Data":"289ca8ebb1ced68e950b2876acebd4c063e6783e7b601445c1865a20e65c84b4"} Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.410247 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fjkdx" Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.415984 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wwvbm" event={"ID":"41739ae1-9772-4d87-8efd-6eeec54dadab","Type":"ContainerStarted","Data":"b75e6fb3bcddcf6dbdbbb03ee6178056fe7525f2d2b3d8b074992b7ad19460c5"} Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.416022 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wwvbm" event={"ID":"41739ae1-9772-4d87-8efd-6eeec54dadab","Type":"ContainerStarted","Data":"0f6f1f3ebcaa8a7d7b3e4dcf5d2aa404f7a9e712e5f469b17353886e64a78082"} Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.429978 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-jcpdc" event={"ID":"cff41058-c666-4dc1-a119-54f157de50b9","Type":"ContainerStarted","Data":"6bc449aae6d688c74831dab5b966f0702fa693d335178f60167f6f91fb3fe64c"} Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.486911 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-wmll4" event={"ID":"08f1469c-c4e8-4dab-a21e-730dc60f8ff2","Type":"ContainerStarted","Data":"0e43523982ae3c891b8092c6081498edcca897337948744775f51cbe98718eea"} Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.487185 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-wmll4" event={"ID":"08f1469c-c4e8-4dab-a21e-730dc60f8ff2","Type":"ContainerStarted","Data":"1303fe3e8328dc9618bdd49f57c312bcabc5359bdf3823595a5e90bc3a4db011"} Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.507487 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" 
(UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:38:27 crc kubenswrapper[4792]: E1202 18:38:27.509536 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:28.009503109 +0000 UTC m=+138.782395437 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.550555 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-f89mv" event={"ID":"ff2b39c3-c4a3-4d87-b4bd-474d89c04db7","Type":"ContainerStarted","Data":"dcbeb5671ec4c1915e9fd9587a2319e4ce3e11e4869f95102a6e3f81d480f41f"} Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.567459 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-nplg6" event={"ID":"5fede595-7ee5-4aaf-999d-45eebf4ca097","Type":"ContainerStarted","Data":"a980c24945d49bee344dc7bfe065029a14610a1e5ba69e3065df4aa8c1659725"} Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.568927 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-d9jsl" event={"ID":"05a76a9b-9018-4c8c-a2c7-a83641ca60b7","Type":"ContainerStarted","Data":"3d8148eb9b4cc52cbe39e208467a2456dd66a82b7d83b8c595d52fbb1e564b7b"} Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.572450 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-szz8b" event={"ID":"794efbdc-c6bb-45a1-ab12-65a4ec7dea6c","Type":"ContainerStarted","Data":"7a4a0f74dc7036e358ab86d44acaa3f5c51ae3ff474bab2bab6b20dc13f0de26"} Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.611578 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:27 crc kubenswrapper[4792]: E1202 18:38:27.611937 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:28.111922874 +0000 UTC m=+138.884815212 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.650331 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-f87xj"] Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.687457 4792 generic.go:334] "Generic (PLEG): container finished" podID="62fcc2b1-4061-42e1-a3f9-15fd336c5e38" containerID="17de4eda19457c793f37014e4f0a10d8c46b0deaf5795945ef0e46cf844112e5" exitCode=0 Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.687578 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-wh7rt" event={"ID":"62fcc2b1-4061-42e1-a3f9-15fd336c5e38","Type":"ContainerDied","Data":"17de4eda19457c793f37014e4f0a10d8c46b0deaf5795945ef0e46cf844112e5"} Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.687606 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-wh7rt" event={"ID":"62fcc2b1-4061-42e1-a3f9-15fd336c5e38","Type":"ContainerStarted","Data":"5e134da5cb99922cbc83f77350ec7348e42d9a1311c342ccf468ceef082d0d9c"} Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.690083 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-lspv9" event={"ID":"a3026143-1986-44ad-b5e7-fbae179a6503","Type":"ContainerStarted","Data":"465af98766d85098c9fe44914033fdd0f0a4ad9ca97346fc6a23e425fe649871"} Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.692159 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-r4dvq" event={"ID":"455642ee-8453-433f-a998-ea30f5baadef","Type":"ContainerStarted","Data":"0639f121b37d52478613be07529bd62015dd1332915aadfc97cf571d78738458"} Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.696671 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-w48fc" event={"ID":"e1bc1adc-a9c6-42a0-a606-ef182242e6ef","Type":"ContainerStarted","Data":"dd3ffbee37e862f55cc952055884e5bfe265c520167fb0fbb690529a66baef21"} Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.712441 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:38:27 crc kubenswrapper[4792]: E1202 18:38:27.712906 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:28.212889851 +0000 UTC m=+138.985782179 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.741863 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wnkgz" event={"ID":"59c54684-8c51-4e7b-975e-2511cf858db6","Type":"ContainerStarted","Data":"b414b86cbe0b5677938723c34bb9a5a2fe3f74c30a4a92aaa18f9831c215182d"} Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.741916 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wnkgz" event={"ID":"59c54684-8c51-4e7b-975e-2511cf858db6","Type":"ContainerStarted","Data":"090e1fa01ffd0fcb9f9eb2c2cb2c4095aab29935920fd4234bd07d5135455986"} Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.748206 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pkwvd"] Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.751301 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-hcshz" event={"ID":"ce83fd31-f9fe-46f9-b865-cdfcfcf8e6be","Type":"ContainerStarted","Data":"6890d6067886a73dfd031b6be87a600b3127c561474b7e4f0e78a9dc2a157083"} Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.751351 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-hcshz" event={"ID":"ce83fd31-f9fe-46f9-b865-cdfcfcf8e6be","Type":"ContainerStarted","Data":"6d9b979bc193f21e01edc79a39e5c5712caac8886f84f59dc06a19eedda607f5"} Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.764674 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-hcshz" Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.781535 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-wzsvf" event={"ID":"f14e7b6b-9dda-45d2-a3d2-097c3432f736","Type":"ContainerStarted","Data":"5fd24b1b128a0f5b7d60a2e5f784cca4c4613a61b81c57a4e475b727cbbcb5b5"} Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.782477 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-wzsvf" Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.797718 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-hcshz" Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.797762 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-phfft"] Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.817623 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: 
\"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.818055 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-5gd9z" event={"ID":"8774a5f7-7c2b-4bee-843c-ae87c78f7dc3","Type":"ContainerStarted","Data":"165e40b4fb63567aa096ee3002855f7c380cdf1d9133269527855c8c74e470ed"} Dec 02 18:38:27 crc kubenswrapper[4792]: E1202 18:38:27.819533 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:28.31949875 +0000 UTC m=+139.092391078 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.824295 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-vf9nc"] Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.824593 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-qjpwp" event={"ID":"1d8206ee-7365-4b1d-8108-6c4626ed7bc3","Type":"ContainerStarted","Data":"744a886b8ea056c332482a2e0c596af358bd633d61a616ae46db8e1b289ceb9b"} Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.844597 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-nnst7" event={"ID":"fb8e1e39-1ba7-4d9a-b6ad-e1ba38d6abf1","Type":"ContainerStarted","Data":"56011a362c019e345b45afd370c08e4d63eedb4824421c4154f69fcd55052da1"} Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.853296 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-z9h9q" event={"ID":"67b8fcaa-f575-4b23-8dbe-9c2402abba3d","Type":"ContainerStarted","Data":"4c654d991d4d0847a876d3f22943196cbda60d4fdb396e0b6221e6a9e6b1b299"} Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.860371 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-jtl55" event={"ID":"881670bf-7c7d-41c1-aedc-b6d9ba17b368","Type":"ContainerStarted","Data":"97d9ba19c05cbfe3d15654b34d64e5c144aa5464cb378fb43db4d551a0203fbf"} Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.862618 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-wn9mc" event={"ID":"29fa6885-6526-4cdc-aace-fe3194157043","Type":"ContainerStarted","Data":"364db4f8cef64f2633293d83721d1f1d1e99f8b11e13a99f82e93b04c059a879"} Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.862654 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-wn9mc" event={"ID":"29fa6885-6526-4cdc-aace-fe3194157043","Type":"ContainerStarted","Data":"e9fc5b7202798b924635c4e4195c1ca62cf71c168d9c235766be16dba4db3227"} Dec 02 18:38:27 crc kubenswrapper[4792]: I1202 18:38:27.923343 4792 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:38:27 crc kubenswrapper[4792]: E1202 18:38:27.924865 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:28.424837594 +0000 UTC m=+139.197729922 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:28 crc kubenswrapper[4792]: I1202 18:38:28.027321 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:28 crc kubenswrapper[4792]: E1202 18:38:28.029401 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:28.529381687 +0000 UTC m=+139.302274015 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:28 crc kubenswrapper[4792]: I1202 18:38:28.049278 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-wzsvf" Dec 02 18:38:28 crc kubenswrapper[4792]: I1202 18:38:28.134394 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:38:28 crc kubenswrapper[4792]: E1202 18:38:28.134862 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:28.634841295 +0000 UTC m=+139.407733623 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:28 crc kubenswrapper[4792]: I1202 18:38:28.236039 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:28 crc kubenswrapper[4792]: E1202 18:38:28.236406 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:28.736390708 +0000 UTC m=+139.509283036 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:28 crc kubenswrapper[4792]: I1202 18:38:28.340011 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:38:28 crc kubenswrapper[4792]: E1202 18:38:28.340277 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:28.840243812 +0000 UTC m=+139.613136140 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 18:38:28 crc kubenswrapper[4792]: I1202 18:38:28.340680 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml"
Dec 02 18:38:28 crc kubenswrapper[4792]: E1202 18:38:28.341158 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:28.841139116 +0000 UTC m=+139.614031434 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 18:38:28 crc kubenswrapper[4792]: I1202 18:38:28.345424 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-wn9mc" podStartSLOduration=119.345410371 podStartE2EDuration="1m59.345410371s" podCreationTimestamp="2025-12-02 18:36:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:38:28.345305158 +0000 UTC m=+139.118197496" watchObservedRunningTime="2025-12-02 18:38:28.345410371 +0000 UTC m=+139.118302699"
Dec 02 18:38:28 crc kubenswrapper[4792]: I1202 18:38:28.391658 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-nnst7" podStartSLOduration=119.391635885 podStartE2EDuration="1m59.391635885s" podCreationTimestamp="2025-12-02 18:36:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:38:28.391598604 +0000 UTC m=+139.164490942" watchObservedRunningTime="2025-12-02 18:38:28.391635885 +0000 UTC m=+139.164528213"
Dec 02 18:38:28 crc kubenswrapper[4792]: I1202 18:38:28.411389 4792 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-fjkdx container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.26:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Dec 02 18:38:28 crc kubenswrapper[4792]: I1202 18:38:28.411479 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fjkdx" podUID="8b019f11-935f-4956-aaf0-c6d2a5d66356" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.26:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Dec 02 18:38:28 crc kubenswrapper[4792]: I1202 18:38:28.442241 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 18:38:28 crc kubenswrapper[4792]: E1202 18:38:28.442756 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:28.94272577 +0000 UTC m=+139.715618098 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 18:38:28 crc kubenswrapper[4792]: I1202 18:38:28.512477 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wnkgz" podStartSLOduration=120.512453226 podStartE2EDuration="2m0.512453226s" podCreationTimestamp="2025-12-02 18:36:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:38:28.50741771 +0000 UTC m=+139.280310038" watchObservedRunningTime="2025-12-02 18:38:28.512453226 +0000 UTC m=+139.285345554"
Dec 02 18:38:28 crc kubenswrapper[4792]: I1202 18:38:28.513823 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-jtl55" podStartSLOduration=119.513816733 podStartE2EDuration="1m59.513816733s" podCreationTimestamp="2025-12-02 18:36:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:38:28.466723635 +0000 UTC m=+139.239615963" watchObservedRunningTime="2025-12-02 18:38:28.513816733 +0000 UTC m=+139.286709061"
Dec 02 18:38:28 crc kubenswrapper[4792]: I1202 18:38:28.551135 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml"
Dec 02 18:38:28 crc kubenswrapper[4792]: E1202 18:38:28.551717 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:29.051701362 +0000 UTC m=+139.824593690 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 18:38:28 crc kubenswrapper[4792]: I1202 18:38:28.652496 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 18:38:28 crc kubenswrapper[4792]: E1202 18:38:28.653590 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:29.153567133 +0000 UTC m=+139.926459471 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 18:38:28 crc kubenswrapper[4792]: I1202 18:38:28.755038 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml"
Dec 02 18:38:28 crc kubenswrapper[4792]: E1202 18:38:28.755374 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:29.255359872 +0000 UTC m=+140.028252200 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 18:38:28 crc kubenswrapper[4792]: I1202 18:38:28.856021 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 18:38:28 crc kubenswrapper[4792]: E1202 18:38:28.856223 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:29.356195925 +0000 UTC m=+140.129088253 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 18:38:28 crc kubenswrapper[4792]: I1202 18:38:28.856382 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml"
Dec 02 18:38:28 crc kubenswrapper[4792]: E1202 18:38:28.856747 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:29.35673933 +0000 UTC m=+140.129631658 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 18:38:28 crc kubenswrapper[4792]: I1202 18:38:28.867967 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wpgk7" event={"ID":"e48d276e-2b89-494b-bbf6-8471a336a7a0","Type":"ContainerStarted","Data":"96087a0d245ef30b248a3e0bde2de9d9397076e65c26f2da23c2878acae3b354"}
Dec 02 18:38:28 crc kubenswrapper[4792]: I1202 18:38:28.869482 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-szz8b" event={"ID":"794efbdc-c6bb-45a1-ab12-65a4ec7dea6c","Type":"ContainerStarted","Data":"3fbb67acf18b651d9b2d1e0e39f483d933aa28351a22bc99614314d00b80b3f9"}
Dec 02 18:38:28 crc kubenswrapper[4792]: I1202 18:38:28.870952 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-phfft" event={"ID":"9053d7ed-27f1-470a-8164-6ef32c05ea87","Type":"ContainerStarted","Data":"c27db551c6c06309a1e8ca016abf555b5a953e8a21c79ea3aa6da2c67d0fd5b1"}
Dec 02 18:38:28 crc kubenswrapper[4792]: I1202 18:38:28.872462 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-nnst7" event={"ID":"fb8e1e39-1ba7-4d9a-b6ad-e1ba38d6abf1","Type":"ContainerStarted","Data":"7d8f43143b7d16daf0dbd3bd51772b5e15a2da01cde1174f5dee5ecf40e3d5eb"}
Dec 02 18:38:28 crc kubenswrapper[4792]: I1202 18:38:28.874150 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-jtl55" event={"ID":"881670bf-7c7d-41c1-aedc-b6d9ba17b368","Type":"ContainerStarted","Data":"63e7c309371b31faaa4b5bb5e011097ad8176662e9a916a315939ee7711e769e"}
Dec 02 18:38:28 crc kubenswrapper[4792]: I1202 18:38:28.875805 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pkwvd" event={"ID":"90e317c4-1ff5-45ef-8f05-bbe33bda8434","Type":"ContainerStarted","Data":"e670836404b5124b0c6fa0203f5a43f83cdd016860f64dc12da4a4a1eaef4815"}
Dec 02 18:38:28 crc kubenswrapper[4792]: I1202 18:38:28.876792 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-f87xj" event={"ID":"39dbc3d9-e001-4ba3-af2d-e19a2016bbeb","Type":"ContainerStarted","Data":"0fcf1acecb11145e68c5e062d86e3049e5ebdb4881bb2dc4593d4753a42087f6"}
Dec 02 18:38:28 crc kubenswrapper[4792]: I1202 18:38:28.877892 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-lspv9" event={"ID":"a3026143-1986-44ad-b5e7-fbae179a6503","Type":"ContainerStarted","Data":"f7c510f4c5ba9376394953aa0311142709b52815e75ef740dd76634edbe5949e"}
Dec 02 18:38:28 crc kubenswrapper[4792]: I1202 18:38:28.878909 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-vf9nc" event={"ID":"9f54e180-97ba-4d10-9bda-7d38a5dab306","Type":"ContainerStarted","Data":"1ebb33482619a34cd093edeebf36ae93f9c6ab657efcad7e0b84e4935e88b136"}
Dec 02 18:38:28 crc kubenswrapper[4792]: I1202 18:38:28.957448 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 18:38:28 crc kubenswrapper[4792]: E1202 18:38:28.957754 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:29.457711807 +0000 UTC m=+140.230604155 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 18:38:28 crc kubenswrapper[4792]: I1202 18:38:28.957844 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml"
Dec 02 18:38:28 crc kubenswrapper[4792]: E1202 18:38:28.959868 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:29.459808573 +0000 UTC m=+140.232701051 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.040353 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fjkdx" podStartSLOduration=120.04033015 podStartE2EDuration="2m0.04033015s" podCreationTimestamp="2025-12-02 18:36:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:38:29.025563413 +0000 UTC m=+139.798455761" watchObservedRunningTime="2025-12-02 18:38:29.04033015 +0000 UTC m=+139.813222478"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.040986 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-tk4v8"]
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.051091 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-z4jbn"]
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.052810 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-z4jbn"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.055223 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Dec 02 18:38:29 crc kubenswrapper[4792]: W1202 18:38:29.059263 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod93d19b8a_404d_4f49_8789_ad474970771a.slice/crio-760a2e88cbc12875168b07867f706ab94b943b6f1eb1bfcfe116234afe6ceed1 WatchSource:0}: Error finding container 760a2e88cbc12875168b07867f706ab94b943b6f1eb1bfcfe116234afe6ceed1: Status 404 returned error can't find the container with id 760a2e88cbc12875168b07867f706ab94b943b6f1eb1bfcfe116234afe6ceed1
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.059670 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 18:38:29 crc kubenswrapper[4792]: E1202 18:38:29.060167 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:29.560149493 +0000 UTC m=+140.333041821 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.060929 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-p6vhp"]
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.062042 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-p6vhp"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.072402 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.073944 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-wzsvf" podStartSLOduration=121.073927464 podStartE2EDuration="2m1.073927464s" podCreationTimestamp="2025-12-02 18:36:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:38:29.072480985 +0000 UTC m=+139.845373313" watchObservedRunningTime="2025-12-02 18:38:29.073927464 +0000 UTC m=+139.846819802"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.080569 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7txmt"]
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.088436 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wdh2w"]
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.089779 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fjkdx"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.098068 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411670-9n2pw"]
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.099591 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-zs9kz"]
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.102088 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rfct2"]
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.119439 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-z4jbn"]
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.124534 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-cds6s"]
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.126103 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-p6vhp"]
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.130650 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-khg9t"]
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.137077 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-r6ncj"]
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.140008 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ggtd2"]
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.142336 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-hcshz" podStartSLOduration=120.142314524 podStartE2EDuration="2m0.142314524s" podCreationTimestamp="2025-12-02 18:36:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:38:29.11241593 +0000 UTC m=+139.885308258" watchObservedRunningTime="2025-12-02 18:38:29.142314524 +0000 UTC m=+139.915206872"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.145001 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wwvbm" podStartSLOduration=121.144990126 podStartE2EDuration="2m1.144990126s" podCreationTimestamp="2025-12-02 18:36:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:38:29.141391339 +0000 UTC m=+139.914283687" watchObservedRunningTime="2025-12-02 18:38:29.144990126 +0000 UTC m=+139.917882454"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.156713 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bjntn"]
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.161439 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-nzgzn"]
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.162314 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dhrp7\" (UniqueName: \"kubernetes.io/projected/77b66620-e883-40d8-8294-b6b4a2f3ad8c-kube-api-access-dhrp7\") pod \"community-operators-z4jbn\" (UID: \"77b66620-e883-40d8-8294-b6b4a2f3ad8c\") " pod="openshift-marketplace/community-operators-z4jbn"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.162354 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qbtjp\" (UniqueName: \"kubernetes.io/projected/bba4322e-397d-4b6a-b52c-14dfeecbf071-kube-api-access-qbtjp\") pod \"certified-operators-p6vhp\" (UID: \"bba4322e-397d-4b6a-b52c-14dfeecbf071\") " pod="openshift-marketplace/certified-operators-p6vhp"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.162387 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77b66620-e883-40d8-8294-b6b4a2f3ad8c-utilities\") pod \"community-operators-z4jbn\" (UID: \"77b66620-e883-40d8-8294-b6b4a2f3ad8c\") " pod="openshift-marketplace/community-operators-z4jbn"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.162411 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bba4322e-397d-4b6a-b52c-14dfeecbf071-catalog-content\") pod \"certified-operators-p6vhp\" (UID: \"bba4322e-397d-4b6a-b52c-14dfeecbf071\") " pod="openshift-marketplace/certified-operators-p6vhp"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.162451 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bba4322e-397d-4b6a-b52c-14dfeecbf071-utilities\") pod \"certified-operators-p6vhp\" (UID: \"bba4322e-397d-4b6a-b52c-14dfeecbf071\") " pod="openshift-marketplace/certified-operators-p6vhp"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.166423 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.166584 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77b66620-e883-40d8-8294-b6b4a2f3ad8c-catalog-content\") pod \"community-operators-z4jbn\" (UID: \"77b66620-e883-40d8-8294-b6b4a2f3ad8c\") " pod="openshift-marketplace/community-operators-z4jbn"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.168424 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-72k72"]
Dec 02 18:38:29 crc kubenswrapper[4792]: E1202 18:38:29.174739 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:29.674715386 +0000 UTC m=+140.447607714 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 18:38:29 crc kubenswrapper[4792]: W1202 18:38:29.175318 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod788d1186_ade1_4f2c_acaa_e0030baa277c.slice/crio-88c51972fbd04394589cd51d85f610a1915296955db0d262e00073796c2be102 WatchSource:0}: Error finding container 88c51972fbd04394589cd51d85f610a1915296955db0d262e00073796c2be102: Status 404 returned error can't find the container with id 88c51972fbd04394589cd51d85f610a1915296955db0d262e00073796c2be102
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.248706 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-qrcm2"]
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.251413 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qrcm2"]
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.251744 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qrcm2"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.253058 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-szz8b" podStartSLOduration=121.253033423 podStartE2EDuration="2m1.253033423s" podCreationTimestamp="2025-12-02 18:36:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:38:29.217359724 +0000 UTC m=+139.990252052" watchObservedRunningTime="2025-12-02 18:38:29.253033423 +0000 UTC m=+140.025925751"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.268186 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.268478 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dhrp7\" (UniqueName: \"kubernetes.io/projected/77b66620-e883-40d8-8294-b6b4a2f3ad8c-kube-api-access-dhrp7\") pod \"community-operators-z4jbn\" (UID: \"77b66620-e883-40d8-8294-b6b4a2f3ad8c\") " pod="openshift-marketplace/community-operators-z4jbn"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.268512 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qbtjp\" (UniqueName: \"kubernetes.io/projected/bba4322e-397d-4b6a-b52c-14dfeecbf071-kube-api-access-qbtjp\") pod \"certified-operators-p6vhp\" (UID: \"bba4322e-397d-4b6a-b52c-14dfeecbf071\") " pod="openshift-marketplace/certified-operators-p6vhp"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.268559 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77b66620-e883-40d8-8294-b6b4a2f3ad8c-utilities\") pod \"community-operators-z4jbn\" (UID: \"77b66620-e883-40d8-8294-b6b4a2f3ad8c\") " pod="openshift-marketplace/community-operators-z4jbn"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.268577 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bba4322e-397d-4b6a-b52c-14dfeecbf071-catalog-content\") pod \"certified-operators-p6vhp\" (UID: \"bba4322e-397d-4b6a-b52c-14dfeecbf071\") " pod="openshift-marketplace/certified-operators-p6vhp"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.268611 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bba4322e-397d-4b6a-b52c-14dfeecbf071-utilities\") pod \"certified-operators-p6vhp\" (UID: \"bba4322e-397d-4b6a-b52c-14dfeecbf071\") " pod="openshift-marketplace/certified-operators-p6vhp"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.268651 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77b66620-e883-40d8-8294-b6b4a2f3ad8c-catalog-content\") pod \"community-operators-z4jbn\" (UID: \"77b66620-e883-40d8-8294-b6b4a2f3ad8c\") " pod="openshift-marketplace/community-operators-z4jbn"
Dec 02 18:38:29 crc kubenswrapper[4792]: E1202 18:38:29.268862 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:29.768843869 +0000 UTC m=+140.541736197 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.270555 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77b66620-e883-40d8-8294-b6b4a2f3ad8c-utilities\") pod \"community-operators-z4jbn\" (UID: \"77b66620-e883-40d8-8294-b6b4a2f3ad8c\") " pod="openshift-marketplace/community-operators-z4jbn"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.270779 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bba4322e-397d-4b6a-b52c-14dfeecbf071-catalog-content\") pod \"certified-operators-p6vhp\" (UID: \"bba4322e-397d-4b6a-b52c-14dfeecbf071\") " pod="openshift-marketplace/certified-operators-p6vhp"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.270796 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77b66620-e883-40d8-8294-b6b4a2f3ad8c-catalog-content\") pod \"community-operators-z4jbn\" (UID: \"77b66620-e883-40d8-8294-b6b4a2f3ad8c\") " pod="openshift-marketplace/community-operators-z4jbn"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.273835 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bba4322e-397d-4b6a-b52c-14dfeecbf071-utilities\") pod \"certified-operators-p6vhp\" (UID: \"bba4322e-397d-4b6a-b52c-14dfeecbf071\") " pod="openshift-marketplace/certified-operators-p6vhp"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.299374 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dhrp7\" (UniqueName: \"kubernetes.io/projected/77b66620-e883-40d8-8294-b6b4a2f3ad8c-kube-api-access-dhrp7\") pod \"community-operators-z4jbn\" (UID: \"77b66620-e883-40d8-8294-b6b4a2f3ad8c\") " pod="openshift-marketplace/community-operators-z4jbn"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.309785 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qbtjp\" (UniqueName: \"kubernetes.io/projected/bba4322e-397d-4b6a-b52c-14dfeecbf071-kube-api-access-qbtjp\") pod \"certified-operators-p6vhp\" (UID: \"bba4322e-397d-4b6a-b52c-14dfeecbf071\") " pod="openshift-marketplace/certified-operators-p6vhp"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.371482 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ecd7ddf-ad6e-4f71-8d80-5626f4830547-catalog-content\") pod \"community-operators-qrcm2\" (UID: \"0ecd7ddf-ad6e-4f71-8d80-5626f4830547\") " pod="openshift-marketplace/community-operators-qrcm2"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.372192 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.372222 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ecd7ddf-ad6e-4f71-8d80-5626f4830547-utilities\") pod \"community-operators-qrcm2\" (UID: \"0ecd7ddf-ad6e-4f71-8d80-5626f4830547\") " pod="openshift-marketplace/community-operators-qrcm2"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.372245 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-27prc\" (UniqueName: \"kubernetes.io/projected/0ecd7ddf-ad6e-4f71-8d80-5626f4830547-kube-api-access-27prc\") pod \"community-operators-qrcm2\" (UID: \"0ecd7ddf-ad6e-4f71-8d80-5626f4830547\") " pod="openshift-marketplace/community-operators-qrcm2"
Dec 02 18:38:29 crc kubenswrapper[4792]: E1202 18:38:29.372857 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:29.872838407 +0000 UTC m=+140.645730735 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.389475 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-z4jbn"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.393158 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-p6vhp"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.431727 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-4kkfv"]
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.434362 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4kkfv"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.474264 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.474652 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4kkfv"]
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.474939 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ecd7ddf-ad6e-4f71-8d80-5626f4830547-catalog-content\") pod \"community-operators-qrcm2\" (UID: \"0ecd7ddf-ad6e-4f71-8d80-5626f4830547\") " pod="openshift-marketplace/community-operators-qrcm2"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.475037 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ecd7ddf-ad6e-4f71-8d80-5626f4830547-utilities\") pod \"community-operators-qrcm2\" (UID: \"0ecd7ddf-ad6e-4f71-8d80-5626f4830547\") " pod="openshift-marketplace/community-operators-qrcm2"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.475074 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-27prc\" (UniqueName: \"kubernetes.io/projected/0ecd7ddf-ad6e-4f71-8d80-5626f4830547-kube-api-access-27prc\") pod \"community-operators-qrcm2\" (UID: \"0ecd7ddf-ad6e-4f71-8d80-5626f4830547\") " pod="openshift-marketplace/community-operators-qrcm2"
Dec 02 18:38:29 crc kubenswrapper[4792]: E1202 18:38:29.475161 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:29.97513857 +0000 UTC m=+140.748030898 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.475485 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ecd7ddf-ad6e-4f71-8d80-5626f4830547-catalog-content\") pod \"community-operators-qrcm2\" (UID: \"0ecd7ddf-ad6e-4f71-8d80-5626f4830547\") " pod="openshift-marketplace/community-operators-qrcm2"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.475549 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ecd7ddf-ad6e-4f71-8d80-5626f4830547-utilities\") pod \"community-operators-qrcm2\" (UID: \"0ecd7ddf-ad6e-4f71-8d80-5626f4830547\") " pod="openshift-marketplace/community-operators-qrcm2"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.522624 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-27prc\" (UniqueName: \"kubernetes.io/projected/0ecd7ddf-ad6e-4f71-8d80-5626f4830547-kube-api-access-27prc\") pod \"community-operators-qrcm2\" (UID: \"0ecd7ddf-ad6e-4f71-8d80-5626f4830547\") " pod="openshift-marketplace/community-operators-qrcm2"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.575838 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qrcm2"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.576438 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pqppw\" (UniqueName: \"kubernetes.io/projected/a2745526-4b6e-4ccb-82d9-106a0bf83b74-kube-api-access-pqppw\") pod \"certified-operators-4kkfv\" (UID: \"a2745526-4b6e-4ccb-82d9-106a0bf83b74\") " pod="openshift-marketplace/certified-operators-4kkfv"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.576513 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2745526-4b6e-4ccb-82d9-106a0bf83b74-catalog-content\") pod \"certified-operators-4kkfv\" (UID: \"a2745526-4b6e-4ccb-82d9-106a0bf83b74\") " pod="openshift-marketplace/certified-operators-4kkfv"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.576610 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2745526-4b6e-4ccb-82d9-106a0bf83b74-utilities\") pod \"certified-operators-4kkfv\" (UID: \"a2745526-4b6e-4ccb-82d9-106a0bf83b74\") " pod="openshift-marketplace/certified-operators-4kkfv"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.576641 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml"
Dec 02 18:38:29 crc kubenswrapper[4792]: E1202 18:38:29.576954 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:30.076937709 +0000 UTC m=+140.849830037 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.680643 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.681454 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pqppw\" (UniqueName: \"kubernetes.io/projected/a2745526-4b6e-4ccb-82d9-106a0bf83b74-kube-api-access-pqppw\") pod \"certified-operators-4kkfv\" (UID: \"a2745526-4b6e-4ccb-82d9-106a0bf83b74\") " pod="openshift-marketplace/certified-operators-4kkfv"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.681542 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2745526-4b6e-4ccb-82d9-106a0bf83b74-catalog-content\") pod \"certified-operators-4kkfv\" (UID: \"a2745526-4b6e-4ccb-82d9-106a0bf83b74\") " pod="openshift-marketplace/certified-operators-4kkfv"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.681607 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2745526-4b6e-4ccb-82d9-106a0bf83b74-utilities\") pod \"certified-operators-4kkfv\" (UID: \"a2745526-4b6e-4ccb-82d9-106a0bf83b74\") " pod="openshift-marketplace/certified-operators-4kkfv"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.682125 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2745526-4b6e-4ccb-82d9-106a0bf83b74-utilities\") pod \"certified-operators-4kkfv\" (UID: \"a2745526-4b6e-4ccb-82d9-106a0bf83b74\") " pod="openshift-marketplace/certified-operators-4kkfv"
Dec 02 18:38:29 crc kubenswrapper[4792]: E1202 18:38:29.682196 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:30.182181871 +0000 UTC m=+140.955074199 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.682600 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2745526-4b6e-4ccb-82d9-106a0bf83b74-catalog-content\") pod \"certified-operators-4kkfv\" (UID: \"a2745526-4b6e-4ccb-82d9-106a0bf83b74\") " pod="openshift-marketplace/certified-operators-4kkfv"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.702847 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pqppw\" (UniqueName: \"kubernetes.io/projected/a2745526-4b6e-4ccb-82d9-106a0bf83b74-kube-api-access-pqppw\") pod \"certified-operators-4kkfv\" (UID: \"a2745526-4b6e-4ccb-82d9-106a0bf83b74\") " pod="openshift-marketplace/certified-operators-4kkfv"
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.784922 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml"
Dec 02 18:38:29 crc kubenswrapper[4792]: E1202 18:38:29.788552 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:30.288516082 +0000 UTC m=+141.061408400 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.886127 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 18:38:29 crc kubenswrapper[4792]: E1202 18:38:29.887301 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:30.38727941 +0000 UTC m=+141.160171738 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.899764 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml"
Dec 02 18:38:29 crc kubenswrapper[4792]: E1202 18:38:29.900185 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:30.400167607 +0000 UTC m=+141.173059935 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.957301 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-d9jsl" event={"ID":"05a76a9b-9018-4c8c-a2c7-a83641ca60b7","Type":"ContainerStarted","Data":"28e11b3207233ab7639cffff53a1dbe600a741a6d32d98c64836ed2e5f99f83c"}
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.986798 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rfct2" event={"ID":"ae2be54d-c2f0-427a-bccd-c63bad0a0f0e","Type":"ContainerStarted","Data":"8c06eed5f74839fde535b9f7f296f8ed56b9bb395fa02b1d0aebe22730b380c6"}
Dec 02 18:38:29 crc kubenswrapper[4792]: I1202 18:38:29.987088 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4kkfv"
Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.001623 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.008953 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-wh7rt" event={"ID":"62fcc2b1-4061-42e1-a3f9-15fd336c5e38","Type":"ContainerStarted","Data":"5f5e1f32dfaf7095c40350c1f3c494f3e834d2b43fe70171bf1fedd62365b01a"}
Dec 02 18:38:30 crc kubenswrapper[4792]: E1202 18:38:30.013116 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:30.513080135 +0000 UTC m=+141.285972463 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.016137 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-r6ncj" event={"ID":"2f18da50-4c56-4f61-bcf4-583bec642127","Type":"ContainerStarted","Data":"8c6d5424ec2319c21b9ce07706a7c9163d37588489097765dc7d1a67453471d8"}
Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.042407 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-hgblz" event={"ID":"04246b55-2809-4c64-abaf-9bed254d0e80","Type":"ContainerStarted","Data":"270aab60c131fe1e3ec3b3fd138f3db266cb2cc4f44f6de05f377affd8afd305"}
Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.043489 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-hgblz"
Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.054703 4792 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-hgblz container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.11:6443/healthz\": dial tcp 10.217.0.11:6443: connect: connection refused" start-of-body=
Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.054760 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-hgblz" podUID="04246b55-2809-4c64-abaf-9bed254d0e80" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.11:6443/healthz\": dial tcp 10.217.0.11:6443: connect: connection refused"
Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.080867 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411670-9n2pw" event={"ID":"64e0e779-21ef-489e-8721-54533e24bf31","Type":"ContainerStarted","Data":"9036ab47a42d35be93ca40cdb8543c12f9d29f923ee09c16cf16b66ee6c0b2f8"}
Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.090828 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-phfft" event={"ID":"9053d7ed-27f1-470a-8164-6ef32c05ea87","Type":"ContainerStarted","Data":"2f05f7e4031fd6778c17a8642cab2809c3c6d88602c66daaba24242fbdbc7057"}
Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.103964 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-5gd9z" event={"ID":"8774a5f7-7c2b-4bee-843c-ae87c78f7dc3","Type":"ContainerStarted","Data":"6a8b5dc7e35bd3842683915cd8e015b435c180847b151b17352ff06f85e58573"}
Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.115277 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml"
Dec 02 18:38:30 crc kubenswrapper[4792]: E1202 18:38:30.133731 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:30.633709741 +0000 UTC m=+141.406602069 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.148428 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-hgblz" podStartSLOduration=122.148409066 podStartE2EDuration="2m2.148409066s" podCreationTimestamp="2025-12-02 18:36:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:38:30.144015438 +0000 UTC m=+140.916907766" watchObservedRunningTime="2025-12-02 18:38:30.148409066 +0000 UTC m=+140.921301394"
Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.154044 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-p6vhp"]
Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.154676 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bjntn" event={"ID":"d4f401bf-9a39-4f1b-a24f-9f5db5a36e40","Type":"ContainerStarted","Data":"04fd8a27463dd3ae39d736a04d004d0545ece43a25290581f74a2ff2271003a9"}
Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.173066 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-zs9kz" event={"ID":"788d1186-ade1-4f2c-acaa-e0030baa277c","Type":"ContainerStarted","Data":"88c51972fbd04394589cd51d85f610a1915296955db0d262e00073796c2be102"}
Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.175966 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-cds6s" event={"ID":"95d5c817-e302-4f46-9db2-333b21486a7c","Type":"ContainerStarted","Data":"57cb29eca3f408a62a2f48e72eec76b83bd73a0288a8c2ea02f3e8db8f116018"}
Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.188372 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-5gd9z" podStartSLOduration=121.188345331 podStartE2EDuration="2m1.188345331s" podCreationTimestamp="2025-12-02 18:36:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:38:30.187510589 +0000 UTC m=+140.960402917" watchObservedRunningTime="2025-12-02 18:38:30.188345331 +0000 UTC m=+140.961237669"
Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.212658 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-w48fc" event={"ID":"e1bc1adc-a9c6-42a0-a606-ef182242e6ef","Type":"ContainerStarted","Data":"b45195df9ae5188dc3cccfb36b1b659ff5a60f5b7fbad8b5f512d1979885d81b"}
Dec 02 18:38:30 crc kubenswrapper[4792]: W1202 18:38:30.225987 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbba4322e_397d_4b6a_b52c_14dfeecbf071.slice/crio-f26e5b950ce6a1657f6bc0e3c4ab8103cb57a2c97e281c4ac4a7b6bdc0a824d8 WatchSource:0}: Error finding container f26e5b950ce6a1657f6bc0e3c4ab8103cb57a2c97e281c4ac4a7b6bdc0a824d8: Status 404 returned error can't find the container with id f26e5b950ce6a1657f6bc0e3c4ab8103cb57a2c97e281c4ac4a7b6bdc0a824d8
Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.226744 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.229808 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-d9jsl" podStartSLOduration=121.229787446 podStartE2EDuration="2m1.229787446s" podCreationTimestamp="2025-12-02 18:36:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:38:30.228700787 +0000 UTC m=+141.001593115" watchObservedRunningTime="2025-12-02 18:38:30.229787446 +0000 UTC m=+141.002679774"
Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.235094 4792 generic.go:334] "Generic (PLEG): container finished" podID="5fede595-7ee5-4aaf-999d-45eebf4ca097" containerID="bcfbc099f97fb24ddfd9b64b1f3b2a8d5362cc8e3bcae75aa21bc7e7d26e7e26" exitCode=0
Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.235408 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-nplg6" event={"ID":"5fede595-7ee5-4aaf-999d-45eebf4ca097","Type":"ContainerDied","Data":"bcfbc099f97fb24ddfd9b64b1f3b2a8d5362cc8e3bcae75aa21bc7e7d26e7e26"}
Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.248539 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wdh2w" event={"ID":"3ed6603d-effa-496b-9ed3-16918cb79b7b","Type":"ContainerStarted","Data":"471f39a166037178e42f881886fcdf7e1cdb0303fee22a4aea61497fee5b2661"}
Dec 02 18:38:30 crc kubenswrapper[4792]: E1202 18:38:30.266955 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:30.766933096 +0000 UTC m=+141.539825424 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.280239 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pkwvd" event={"ID":"90e317c4-1ff5-45ef-8f05-bbe33bda8434","Type":"ContainerStarted","Data":"ce74c26786d817b7a6411a41aef1a98e0dc2523ff454ac5b75181bedcde0e23e"}
Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.322390 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-phfft" podStartSLOduration=121.322367797 podStartE2EDuration="2m1.322367797s" podCreationTimestamp="2025-12-02 18:36:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:38:30.278462796 +0000 UTC m=+141.051355134" watchObservedRunningTime="2025-12-02 18:38:30.322367797 +0000 UTC m=+141.095260125"
Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.333883 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml"
Dec 02 18:38:30 crc kubenswrapper[4792]: E1202 18:38:30.334218 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:30.834205236 +0000 UTC m=+141.607097564 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.345949 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-vf9nc" event={"ID":"9f54e180-97ba-4d10-9bda-7d38a5dab306","Type":"ContainerStarted","Data":"59550211ab54cd4e9ef169788f1e6cf901fe79166e2a53ac50bfbc464aad43ff"}
Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.364301 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-72k72" event={"ID":"649496fd-1b8f-429a-be02-160ecc5b2ab9","Type":"ContainerStarted","Data":"093245ff2bb9bb6dda485de564f428fdb6c9edabb86194004ed2cd51f2f40f3a"}
Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.370760 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-tk4v8" event={"ID":"93d19b8a-404d-4f49-8789-ad474970771a","Type":"ContainerStarted","Data":"2c66c1b785263633306750236be05883ed58a59ac1e723df0d94ffcfa8e018d0"}
Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.370823 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-tk4v8" event={"ID":"93d19b8a-404d-4f49-8789-ad474970771a","Type":"ContainerStarted","Data":"760a2e88cbc12875168b07867f706ab94b943b6f1eb1bfcfe116234afe6ceed1"}
Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.379807 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-w48fc" podStartSLOduration=121.379754351 podStartE2EDuration="2m1.379754351s" podCreationTimestamp="2025-12-02 18:36:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:38:30.328234125 +0000 UTC m=+141.101126453" watchObservedRunningTime="2025-12-02 18:38:30.379754351 +0000 UTC m=+141.152646669"
Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.386837 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-nzgzn" event={"ID":"ab249412-8c3a-4e5f-b84a-374b19cc1dc9","Type":"ContainerStarted","Data":"4329d7a93f95071e1c5fa95afbc2403ca98bf4c07209be09f92ac45c60990060"}
Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.387701 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-nzgzn"
Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.392408 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-f89mv" event={"ID":"ff2b39c3-c4a3-4d87-b4bd-474d89c04db7","Type":"ContainerStarted","Data":"ddbd2ead98a3e18c9bedacec6cde77916eef3b96831a72f502fb04a23ee99998"}
Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.392954 4792 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-nzgzn container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/healthz\": dial tcp 
10.217.0.15:8080: connect: connection refused" start-of-body= Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.393004 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-nzgzn" podUID="ab249412-8c3a-4e5f-b84a-374b19cc1dc9" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.15:8080/healthz\": dial tcp 10.217.0.15:8080: connect: connection refused" Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.419070 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-jcpdc" event={"ID":"cff41058-c666-4dc1-a119-54f157de50b9","Type":"ContainerStarted","Data":"c70911a0077b3e20570e693c1863f0c075c0c19ba113f2f6257ab9f40b650e07"} Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.419126 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-jcpdc" Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.420504 4792 patch_prober.go:28] interesting pod/downloads-7954f5f757-jcpdc container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body= Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.420558 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-jcpdc" podUID="cff41058-c666-4dc1-a119-54f157de50b9" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.437029 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:38:30 crc kubenswrapper[4792]: E1202 18:38:30.437261 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:30.937242108 +0000 UTC m=+141.710134436 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.437498 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:30 crc kubenswrapper[4792]: E1202 18:38:30.465143 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:30.965123128 +0000 UTC m=+141.738015446 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.469118 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pkwvd" podStartSLOduration=122.469094915 podStartE2EDuration="2m2.469094915s" podCreationTimestamp="2025-12-02 18:36:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:38:30.417330992 +0000 UTC m=+141.190223320" watchObservedRunningTime="2025-12-02 18:38:30.469094915 +0000 UTC m=+141.241987243" Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.475950 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-khg9t" event={"ID":"e690e93c-92e0-4045-88b5-685763786d3a","Type":"ContainerStarted","Data":"35a2a1448da35042c469222a870173dda52ec40d858f630364f46115b5aea477"} Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.486864 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-tk4v8" podStartSLOduration=7.486841843 podStartE2EDuration="7.486841843s" podCreationTimestamp="2025-12-02 18:38:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:38:30.466040623 +0000 UTC m=+141.238932951" watchObservedRunningTime="2025-12-02 18:38:30.486841843 +0000 UTC m=+141.259734171" Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.490710 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-f87xj" 
event={"ID":"39dbc3d9-e001-4ba3-af2d-e19a2016bbeb","Type":"ContainerStarted","Data":"056613063c45fde111acb78cdcca0d1a8819bc3c5d4b121856a75320de201301"} Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.493397 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-f87xj" Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.507989 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-f87xj" Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.530174 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-d9jsl" Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.541947 4792 patch_prober.go:28] interesting pod/router-default-5444994796-d9jsl container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 18:38:30 crc kubenswrapper[4792]: [-]has-synced failed: reason withheld Dec 02 18:38:30 crc kubenswrapper[4792]: [+]process-running ok Dec 02 18:38:30 crc kubenswrapper[4792]: healthz check failed Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.542016 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-d9jsl" podUID="05a76a9b-9018-4c8c-a2c7-a83641ca60b7" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.542550 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-wmll4" event={"ID":"08f1469c-c4e8-4dab-a21e-730dc60f8ff2","Type":"ContainerStarted","Data":"5d20e1df874aba9e40bb0f41ea378679c8ff0a88daedd086dd18c175d946fa55"} Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.543781 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:38:30 crc kubenswrapper[4792]: E1202 18:38:30.554011 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:31.053961179 +0000 UTC m=+141.826853507 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.557789 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-z4jbn"] Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.558568 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-qjpwp" event={"ID":"1d8206ee-7365-4b1d-8108-6c4626ed7bc3","Type":"ContainerStarted","Data":"9111848d233ca812111dba51f7de15b0fad43c4cabc3b131d9ca482288b241c4"} Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.576857 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-nzgzn" podStartSLOduration=121.576832084 podStartE2EDuration="2m1.576832084s" podCreationTimestamp="2025-12-02 18:36:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:38:30.50755886 +0000 UTC m=+141.280451188" watchObservedRunningTime="2025-12-02 18:38:30.576832084 +0000 UTC m=+141.349724412" Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.577164 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-vf9nc" podStartSLOduration=121.577158623 podStartE2EDuration="2m1.577158623s" podCreationTimestamp="2025-12-02 18:36:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:38:30.543088616 +0000 UTC m=+141.315980944" watchObservedRunningTime="2025-12-02 18:38:30.577158623 +0000 UTC m=+141.350050951" Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.605874 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-jcpdc" podStartSLOduration=122.605848175 podStartE2EDuration="2m2.605848175s" podCreationTimestamp="2025-12-02 18:36:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:38:30.593818321 +0000 UTC m=+141.366710669" watchObservedRunningTime="2025-12-02 18:38:30.605848175 +0000 UTC m=+141.378740503" Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.640368 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-z9h9q" event={"ID":"67b8fcaa-f575-4b23-8dbe-9c2402abba3d","Type":"ContainerStarted","Data":"491837e253dc5e65557d482197a2dc96e4e4685b28ae0a1e79b404779f354db4"} Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.668688 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:30 crc kubenswrapper[4792]: E1202 18:38:30.670233 
4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:31.170213257 +0000 UTC m=+141.943105585 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.675056 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7txmt" event={"ID":"805371d5-27c1-4895-aab2-23e8e1d9a91b","Type":"ContainerStarted","Data":"e3fe5ad6405a3122e9bc33a50f132e54023d4bf0b0a29bb727f17ef5a3cf6408"} Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.685129 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7txmt" Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.688916 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-wmll4" podStartSLOduration=122.68889639 podStartE2EDuration="2m2.68889639s" podCreationTimestamp="2025-12-02 18:36:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:38:30.684971344 +0000 UTC m=+141.457863672" watchObservedRunningTime="2025-12-02 18:38:30.68889639 +0000 UTC m=+141.461788708" Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.690634 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-f87xj" podStartSLOduration=121.690618606 podStartE2EDuration="2m1.690618606s" podCreationTimestamp="2025-12-02 18:36:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:38:30.641052022 +0000 UTC m=+141.413944350" watchObservedRunningTime="2025-12-02 18:38:30.690618606 +0000 UTC m=+141.463510934" Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.704738 4792 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-7txmt container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.37:8443/healthz\": dial tcp 10.217.0.37:8443: connect: connection refused" start-of-body= Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.704826 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7txmt" podUID="805371d5-27c1-4895-aab2-23e8e1d9a91b" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.37:8443/healthz\": dial tcp 10.217.0.37:8443: connect: connection refused" Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.707008 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-qjpwp" podStartSLOduration=7.706984887 podStartE2EDuration="7.706984887s" 
podCreationTimestamp="2025-12-02 18:38:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:38:30.705278481 +0000 UTC m=+141.478170809" watchObservedRunningTime="2025-12-02 18:38:30.706984887 +0000 UTC m=+141.479877205" Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.768139 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7txmt" podStartSLOduration=121.768117622 podStartE2EDuration="2m1.768117622s" podCreationTimestamp="2025-12-02 18:36:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:38:30.737904679 +0000 UTC m=+141.510797007" watchObservedRunningTime="2025-12-02 18:38:30.768117622 +0000 UTC m=+141.541009950" Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.777429 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:38:30 crc kubenswrapper[4792]: E1202 18:38:30.778840 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:31.278810379 +0000 UTC m=+142.051702757 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.800052 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wpgk7" event={"ID":"e48d276e-2b89-494b-bbf6-8471a336a7a0","Type":"ContainerStarted","Data":"1ddb331eb9e94017b700c410fb7bee64bc96f86ac72bdae31bfe76e38f5d5a93"} Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.856944 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wpgk7" podStartSLOduration=121.856922781 podStartE2EDuration="2m1.856922781s" podCreationTimestamp="2025-12-02 18:36:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:38:30.842302008 +0000 UTC m=+141.615194336" watchObservedRunningTime="2025-12-02 18:38:30.856922781 +0000 UTC m=+141.629815109" Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.857687 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qrcm2"] Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.883779 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:30 crc kubenswrapper[4792]: E1202 18:38:30.884177 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:31.384162134 +0000 UTC m=+142.157054462 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.892032 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ggtd2" event={"ID":"c0ecd9f5-ffc9-4a7a-8d6f-96781e7577ac","Type":"ContainerStarted","Data":"7ad11e0784ca51d85779e5a08f2eb5e232f9532b6462aaa16f07fe94f3ab7c49"} Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.942894 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-lspv9" event={"ID":"a3026143-1986-44ad-b5e7-fbae179a6503","Type":"ContainerStarted","Data":"bfde2db6057746868c993dca002de73796a69f9a82fb955832649ad400755a9a"} Dec 02 18:38:30 crc kubenswrapper[4792]: I1202 18:38:30.986882 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:38:30 crc kubenswrapper[4792]: E1202 18:38:30.988425 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:31.488400899 +0000 UTC m=+142.261293227 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:31 crc kubenswrapper[4792]: I1202 18:38:31.016180 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-r4dvq" event={"ID":"455642ee-8453-433f-a998-ea30f5baadef","Type":"ContainerStarted","Data":"d4c8340d827ef4392cecb63345b79f962f320058819a6342dd7365bfc87e7095"} Dec 02 18:38:31 crc kubenswrapper[4792]: I1202 18:38:31.023362 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-lspv9" podStartSLOduration=122.023343289 podStartE2EDuration="2m2.023343289s" podCreationTimestamp="2025-12-02 18:36:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:38:31.022746173 +0000 UTC m=+141.795638501" watchObservedRunningTime="2025-12-02 18:38:31.023343289 +0000 UTC m=+141.796235617" Dec 02 18:38:31 crc kubenswrapper[4792]: I1202 18:38:31.055435 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-hrlnq"] Dec 02 18:38:31 crc kubenswrapper[4792]: I1202 18:38:31.056646 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hrlnq" Dec 02 18:38:31 crc kubenswrapper[4792]: I1202 18:38:31.071865 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 02 18:38:31 crc kubenswrapper[4792]: I1202 18:38:31.084111 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j7k6j" event={"ID":"139247ce-bdc4-46c8-8acb-0120504e8855","Type":"ContainerStarted","Data":"74cdb26c9c33427daae9a95a5cceb3386b73da246d274c508f3a147f6e8e23e4"} Dec 02 18:38:31 crc kubenswrapper[4792]: I1202 18:38:31.089382 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hrlnq"] Dec 02 18:38:31 crc kubenswrapper[4792]: I1202 18:38:31.090470 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:31 crc kubenswrapper[4792]: E1202 18:38:31.098315 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:31.598298166 +0000 UTC m=+142.371190494 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:31 crc kubenswrapper[4792]: I1202 18:38:31.192057 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:38:31 crc kubenswrapper[4792]: I1202 18:38:31.192361 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c118dcaa-7c35-4a91-9b16-b3796e95fa86-catalog-content\") pod \"redhat-marketplace-hrlnq\" (UID: \"c118dcaa-7c35-4a91-9b16-b3796e95fa86\") " pod="openshift-marketplace/redhat-marketplace-hrlnq" Dec 02 18:38:31 crc kubenswrapper[4792]: I1202 18:38:31.192386 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ws67\" (UniqueName: \"kubernetes.io/projected/c118dcaa-7c35-4a91-9b16-b3796e95fa86-kube-api-access-9ws67\") pod \"redhat-marketplace-hrlnq\" (UID: \"c118dcaa-7c35-4a91-9b16-b3796e95fa86\") " pod="openshift-marketplace/redhat-marketplace-hrlnq" Dec 02 18:38:31 crc kubenswrapper[4792]: I1202 18:38:31.192460 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c118dcaa-7c35-4a91-9b16-b3796e95fa86-utilities\") pod \"redhat-marketplace-hrlnq\" (UID: \"c118dcaa-7c35-4a91-9b16-b3796e95fa86\") " pod="openshift-marketplace/redhat-marketplace-hrlnq" Dec 02 18:38:31 crc kubenswrapper[4792]: E1202 18:38:31.194742 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:31.69472533 +0000 UTC m=+142.467617658 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:31 crc kubenswrapper[4792]: I1202 18:38:31.225370 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j7k6j" podStartSLOduration=122.225341244 podStartE2EDuration="2m2.225341244s" podCreationTimestamp="2025-12-02 18:36:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:38:31.223111184 +0000 UTC m=+141.996003522" watchObservedRunningTime="2025-12-02 18:38:31.225341244 +0000 UTC m=+141.998233572" Dec 02 18:38:31 crc kubenswrapper[4792]: I1202 18:38:31.312984 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:31 crc kubenswrapper[4792]: I1202 18:38:31.313057 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c118dcaa-7c35-4a91-9b16-b3796e95fa86-catalog-content\") pod \"redhat-marketplace-hrlnq\" (UID: \"c118dcaa-7c35-4a91-9b16-b3796e95fa86\") " pod="openshift-marketplace/redhat-marketplace-hrlnq" Dec 02 18:38:31 crc kubenswrapper[4792]: I1202 18:38:31.313081 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ws67\" (UniqueName: \"kubernetes.io/projected/c118dcaa-7c35-4a91-9b16-b3796e95fa86-kube-api-access-9ws67\") pod \"redhat-marketplace-hrlnq\" (UID: \"c118dcaa-7c35-4a91-9b16-b3796e95fa86\") " pod="openshift-marketplace/redhat-marketplace-hrlnq" Dec 02 18:38:31 crc kubenswrapper[4792]: I1202 18:38:31.313155 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c118dcaa-7c35-4a91-9b16-b3796e95fa86-utilities\") pod \"redhat-marketplace-hrlnq\" (UID: \"c118dcaa-7c35-4a91-9b16-b3796e95fa86\") " pod="openshift-marketplace/redhat-marketplace-hrlnq" Dec 02 18:38:31 crc kubenswrapper[4792]: I1202 18:38:31.340075 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c118dcaa-7c35-4a91-9b16-b3796e95fa86-catalog-content\") pod \"redhat-marketplace-hrlnq\" (UID: \"c118dcaa-7c35-4a91-9b16-b3796e95fa86\") " pod="openshift-marketplace/redhat-marketplace-hrlnq" Dec 02 18:38:31 crc kubenswrapper[4792]: I1202 18:38:31.356951 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c118dcaa-7c35-4a91-9b16-b3796e95fa86-utilities\") pod \"redhat-marketplace-hrlnq\" (UID: \"c118dcaa-7c35-4a91-9b16-b3796e95fa86\") " pod="openshift-marketplace/redhat-marketplace-hrlnq" Dec 02 18:38:31 crc kubenswrapper[4792]: E1202 18:38:31.387298 4792 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:31.887278101 +0000 UTC m=+142.660170429 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:31 crc kubenswrapper[4792]: I1202 18:38:31.419459 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:38:31 crc kubenswrapper[4792]: E1202 18:38:31.420325 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:31.92030045 +0000 UTC m=+142.693192778 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:31 crc kubenswrapper[4792]: I1202 18:38:31.426298 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9ws67\" (UniqueName: \"kubernetes.io/projected/c118dcaa-7c35-4a91-9b16-b3796e95fa86-kube-api-access-9ws67\") pod \"redhat-marketplace-hrlnq\" (UID: \"c118dcaa-7c35-4a91-9b16-b3796e95fa86\") " pod="openshift-marketplace/redhat-marketplace-hrlnq" Dec 02 18:38:31 crc kubenswrapper[4792]: I1202 18:38:31.448167 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-b92km"] Dec 02 18:38:31 crc kubenswrapper[4792]: I1202 18:38:31.473202 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b92km" Dec 02 18:38:31 crc kubenswrapper[4792]: I1202 18:38:31.481828 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-b92km"] Dec 02 18:38:31 crc kubenswrapper[4792]: I1202 18:38:31.528115 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:31 crc kubenswrapper[4792]: E1202 18:38:31.528555 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:32.028540432 +0000 UTC m=+142.801432760 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:31 crc kubenswrapper[4792]: I1202 18:38:31.536369 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hrlnq" Dec 02 18:38:31 crc kubenswrapper[4792]: I1202 18:38:31.543054 4792 patch_prober.go:28] interesting pod/router-default-5444994796-d9jsl container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 18:38:31 crc kubenswrapper[4792]: [-]has-synced failed: reason withheld Dec 02 18:38:31 crc kubenswrapper[4792]: [+]process-running ok Dec 02 18:38:31 crc kubenswrapper[4792]: healthz check failed Dec 02 18:38:31 crc kubenswrapper[4792]: I1202 18:38:31.543131 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-d9jsl" podUID="05a76a9b-9018-4c8c-a2c7-a83641ca60b7" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 18:38:31 crc kubenswrapper[4792]: I1202 18:38:31.629082 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:38:31 crc kubenswrapper[4792]: I1202 18:38:31.629456 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fbmnl\" (UniqueName: \"kubernetes.io/projected/a691800f-1f6a-4e86-b97a-73d8181b39d5-kube-api-access-fbmnl\") pod \"redhat-marketplace-b92km\" (UID: \"a691800f-1f6a-4e86-b97a-73d8181b39d5\") " pod="openshift-marketplace/redhat-marketplace-b92km" Dec 02 18:38:31 crc kubenswrapper[4792]: I1202 18:38:31.629535 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/a691800f-1f6a-4e86-b97a-73d8181b39d5-utilities\") pod \"redhat-marketplace-b92km\" (UID: \"a691800f-1f6a-4e86-b97a-73d8181b39d5\") " pod="openshift-marketplace/redhat-marketplace-b92km" Dec 02 18:38:31 crc kubenswrapper[4792]: I1202 18:38:31.629585 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a691800f-1f6a-4e86-b97a-73d8181b39d5-catalog-content\") pod \"redhat-marketplace-b92km\" (UID: \"a691800f-1f6a-4e86-b97a-73d8181b39d5\") " pod="openshift-marketplace/redhat-marketplace-b92km" Dec 02 18:38:31 crc kubenswrapper[4792]: E1202 18:38:31.629732 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:32.129690964 +0000 UTC m=+142.902583292 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:31 crc kubenswrapper[4792]: I1202 18:38:31.732488 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:31 crc kubenswrapper[4792]: I1202 18:38:31.733063 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fbmnl\" (UniqueName: \"kubernetes.io/projected/a691800f-1f6a-4e86-b97a-73d8181b39d5-kube-api-access-fbmnl\") pod \"redhat-marketplace-b92km\" (UID: \"a691800f-1f6a-4e86-b97a-73d8181b39d5\") " pod="openshift-marketplace/redhat-marketplace-b92km" Dec 02 18:38:31 crc kubenswrapper[4792]: I1202 18:38:31.733118 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a691800f-1f6a-4e86-b97a-73d8181b39d5-utilities\") pod \"redhat-marketplace-b92km\" (UID: \"a691800f-1f6a-4e86-b97a-73d8181b39d5\") " pod="openshift-marketplace/redhat-marketplace-b92km" Dec 02 18:38:31 crc kubenswrapper[4792]: I1202 18:38:31.733154 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a691800f-1f6a-4e86-b97a-73d8181b39d5-catalog-content\") pod \"redhat-marketplace-b92km\" (UID: \"a691800f-1f6a-4e86-b97a-73d8181b39d5\") " pod="openshift-marketplace/redhat-marketplace-b92km" Dec 02 18:38:31 crc kubenswrapper[4792]: I1202 18:38:31.733773 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a691800f-1f6a-4e86-b97a-73d8181b39d5-catalog-content\") pod \"redhat-marketplace-b92km\" (UID: \"a691800f-1f6a-4e86-b97a-73d8181b39d5\") " pod="openshift-marketplace/redhat-marketplace-b92km" Dec 02 18:38:31 crc kubenswrapper[4792]: E1202 
18:38:31.734113 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:32.234094603 +0000 UTC m=+143.006986931 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:31 crc kubenswrapper[4792]: I1202 18:38:31.734703 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a691800f-1f6a-4e86-b97a-73d8181b39d5-utilities\") pod \"redhat-marketplace-b92km\" (UID: \"a691800f-1f6a-4e86-b97a-73d8181b39d5\") " pod="openshift-marketplace/redhat-marketplace-b92km" Dec 02 18:38:31 crc kubenswrapper[4792]: I1202 18:38:31.737072 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4kkfv"] Dec 02 18:38:31 crc kubenswrapper[4792]: I1202 18:38:31.771388 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fbmnl\" (UniqueName: \"kubernetes.io/projected/a691800f-1f6a-4e86-b97a-73d8181b39d5-kube-api-access-fbmnl\") pod \"redhat-marketplace-b92km\" (UID: \"a691800f-1f6a-4e86-b97a-73d8181b39d5\") " pod="openshift-marketplace/redhat-marketplace-b92km" Dec 02 18:38:31 crc kubenswrapper[4792]: I1202 18:38:31.834620 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b92km" Dec 02 18:38:31 crc kubenswrapper[4792]: I1202 18:38:31.836203 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:38:31 crc kubenswrapper[4792]: E1202 18:38:31.836635 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:32.336618592 +0000 UTC m=+143.109510920 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:31 crc kubenswrapper[4792]: I1202 18:38:31.941372 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:31 crc kubenswrapper[4792]: E1202 18:38:31.942111 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:32.44209793 +0000 UTC m=+143.214990258 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.042299 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-c85tx"] Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.043768 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 18:38:32 crc kubenswrapper[4792]: E1202 18:38:32.044266 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:32.544249629 +0000 UTC m=+143.317141957 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.044375 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-c85tx"
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.052924 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-c85tx"]
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.055892 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.146506 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml"
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.146613 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxgtv\" (UniqueName: \"kubernetes.io/projected/715f891b-4e52-454b-b5c0-22694ef088e8-kube-api-access-xxgtv\") pod \"redhat-operators-c85tx\" (UID: \"715f891b-4e52-454b-b5c0-22694ef088e8\") " pod="openshift-marketplace/redhat-operators-c85tx"
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.146640 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/715f891b-4e52-454b-b5c0-22694ef088e8-utilities\") pod \"redhat-operators-c85tx\" (UID: \"715f891b-4e52-454b-b5c0-22694ef088e8\") " pod="openshift-marketplace/redhat-operators-c85tx"
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.146675 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/715f891b-4e52-454b-b5c0-22694ef088e8-catalog-content\") pod \"redhat-operators-c85tx\" (UID: \"715f891b-4e52-454b-b5c0-22694ef088e8\") " pod="openshift-marketplace/redhat-operators-c85tx"
Dec 02 18:38:32 crc kubenswrapper[4792]: E1202 18:38:32.147072 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:32.647042095 +0000 UTC m=+143.419934423 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.161826 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wdh2w" event={"ID":"3ed6603d-effa-496b-9ed3-16918cb79b7b","Type":"ContainerStarted","Data":"4cfad70305ae2f11e7ef97b7033893d2ad182af7404554b886ef59191d574af6"}
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.189968 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-wh7rt" event={"ID":"62fcc2b1-4061-42e1-a3f9-15fd336c5e38","Type":"ContainerStarted","Data":"132e4e0e7c0463eaffa86a061701fd4f26254adc019285cd90cae0358f01119b"}
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.193369 4792 generic.go:334] "Generic (PLEG): container finished" podID="bba4322e-397d-4b6a-b52c-14dfeecbf071" containerID="5f4b0a470cf4935b43f722ad2db1816e07c10f35b5f2df4afb97aa24d320d6fc" exitCode=0
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.194163 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p6vhp" event={"ID":"bba4322e-397d-4b6a-b52c-14dfeecbf071","Type":"ContainerDied","Data":"5f4b0a470cf4935b43f722ad2db1816e07c10f35b5f2df4afb97aa24d320d6fc"}
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.194200 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p6vhp" event={"ID":"bba4322e-397d-4b6a-b52c-14dfeecbf071","Type":"ContainerStarted","Data":"f26e5b950ce6a1657f6bc0e3c4ab8103cb57a2c97e281c4ac4a7b6bdc0a824d8"}
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.199890 4792 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.203200 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wdh2w" podStartSLOduration=123.203183806 podStartE2EDuration="2m3.203183806s" podCreationTimestamp="2025-12-02 18:36:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:38:32.200710439 +0000 UTC m=+142.973602767" watchObservedRunningTime="2025-12-02 18:38:32.203183806 +0000 UTC m=+142.976076134"
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.215942 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-x7tgb"]
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.217202 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-x7tgb"
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.226319 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-cds6s" event={"ID":"95d5c817-e302-4f46-9db2-333b21486a7c","Type":"ContainerStarted","Data":"aaafade72e80ab6a58561ed3d3c64daabe383dcdc7116646bf654ae7c0f16e1e"}
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.239164 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rfct2" event={"ID":"ae2be54d-c2f0-427a-bccd-c63bad0a0f0e","Type":"ContainerStarted","Data":"867da0d29eaf0ca4fdc1221675df7ae57c9d82d92a454a7247542a3e0995ed46"}
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.239217 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rfct2" event={"ID":"ae2be54d-c2f0-427a-bccd-c63bad0a0f0e","Type":"ContainerStarted","Data":"19c9f44a42d65c047ade334faeb58a8ba99fa5ba8e26b0c6609faf40bd40b158"}
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.239961 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rfct2"
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.248308 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.248785 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxgtv\" (UniqueName: \"kubernetes.io/projected/715f891b-4e52-454b-b5c0-22694ef088e8-kube-api-access-xxgtv\") pod \"redhat-operators-c85tx\" (UID: \"715f891b-4e52-454b-b5c0-22694ef088e8\") " pod="openshift-marketplace/redhat-operators-c85tx"
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.248814 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/715f891b-4e52-454b-b5c0-22694ef088e8-utilities\") pod \"redhat-operators-c85tx\" (UID: \"715f891b-4e52-454b-b5c0-22694ef088e8\") " pod="openshift-marketplace/redhat-operators-c85tx"
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.248845 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/715f891b-4e52-454b-b5c0-22694ef088e8-catalog-content\") pod \"redhat-operators-c85tx\" (UID: \"715f891b-4e52-454b-b5c0-22694ef088e8\") " pod="openshift-marketplace/redhat-operators-c85tx"
Dec 02 18:38:32 crc kubenswrapper[4792]: E1202 18:38:32.249860 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:32.749843201 +0000 UTC m=+143.522735529 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.250901 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/715f891b-4e52-454b-b5c0-22694ef088e8-utilities\") pod \"redhat-operators-c85tx\" (UID: \"715f891b-4e52-454b-b5c0-22694ef088e8\") " pod="openshift-marketplace/redhat-operators-c85tx"
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.251151 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/715f891b-4e52-454b-b5c0-22694ef088e8-catalog-content\") pod \"redhat-operators-c85tx\" (UID: \"715f891b-4e52-454b-b5c0-22694ef088e8\") " pod="openshift-marketplace/redhat-operators-c85tx"
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.259250 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-x7tgb"]
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.259733 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-zs9kz" event={"ID":"788d1186-ade1-4f2c-acaa-e0030baa277c","Type":"ContainerStarted","Data":"7b05c5f9b5a0c5c26c96866b2c8d78601d743302e6a5ba93fc297491eaef5963"}
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.294920 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-r4dvq" event={"ID":"455642ee-8453-433f-a998-ea30f5baadef","Type":"ContainerStarted","Data":"f8b735af771fc97aef27f4fdcdde637839239b0fcf8ad4f9a19f775b89a01929"}
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.311728 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxgtv\" (UniqueName: \"kubernetes.io/projected/715f891b-4e52-454b-b5c0-22694ef088e8-kube-api-access-xxgtv\") pod \"redhat-operators-c85tx\" (UID: \"715f891b-4e52-454b-b5c0-22694ef088e8\") " pod="openshift-marketplace/redhat-operators-c85tx"
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.314158 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-wh7rt" podStartSLOduration=124.314137051 podStartE2EDuration="2m4.314137051s" podCreationTimestamp="2025-12-02 18:36:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:38:32.272765378 +0000 UTC m=+143.045657716" watchObservedRunningTime="2025-12-02 18:38:32.314137051 +0000 UTC m=+143.087029379"
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.321849 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-f89mv" event={"ID":"ff2b39c3-c4a3-4d87-b4bd-474d89c04db7","Type":"ContainerStarted","Data":"952b9d7a26b0824dd26f28c310fe80a94b44bef5e592a3076c89c43bf35fd929"}
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.326167 4792 generic.go:334] "Generic (PLEG): container finished" podID="77b66620-e883-40d8-8294-b6b4a2f3ad8c" containerID="7cb8a1d079a33fca15ea39298554e95af0d3b390f6b94e29b1ac06938d337336" exitCode=0
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.326240 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z4jbn" event={"ID":"77b66620-e883-40d8-8294-b6b4a2f3ad8c","Type":"ContainerDied","Data":"7cb8a1d079a33fca15ea39298554e95af0d3b390f6b94e29b1ac06938d337336"}
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.326287 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z4jbn" event={"ID":"77b66620-e883-40d8-8294-b6b4a2f3ad8c","Type":"ContainerStarted","Data":"fb0d3350b33429eee2e7735c63c8f8c805033923e2529a44316727ea629ceaf7"}
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.333794 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-72k72" event={"ID":"649496fd-1b8f-429a-be02-160ecc5b2ab9","Type":"ContainerStarted","Data":"dcb70de893f426f598ee50b63727e91e793177505abeee2e313953fd16b25b57"}
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.350958 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vrh79\" (UniqueName: \"kubernetes.io/projected/8bad1c4d-9724-410d-8651-0a8c7c1d92b3-kube-api-access-vrh79\") pod \"redhat-operators-x7tgb\" (UID: \"8bad1c4d-9724-410d-8651-0a8c7c1d92b3\") " pod="openshift-marketplace/redhat-operators-x7tgb"
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.351031 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8bad1c4d-9724-410d-8651-0a8c7c1d92b3-utilities\") pod \"redhat-operators-x7tgb\" (UID: \"8bad1c4d-9724-410d-8651-0a8c7c1d92b3\") " pod="openshift-marketplace/redhat-operators-x7tgb"
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.351195 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml"
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.351253 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8bad1c4d-9724-410d-8651-0a8c7c1d92b3-catalog-content\") pod \"redhat-operators-x7tgb\" (UID: \"8bad1c4d-9724-410d-8651-0a8c7c1d92b3\") " pod="openshift-marketplace/redhat-operators-x7tgb"
Dec 02 18:38:32 crc kubenswrapper[4792]: E1202 18:38:32.376495 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:32.876477139 +0000 UTC m=+143.649369467 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.391227 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-nplg6" event={"ID":"5fede595-7ee5-4aaf-999d-45eebf4ca097","Type":"ContainerStarted","Data":"65e8b52daa1d961f86c8b88b206034b86068010c0e18bd9713f85cb37a1df95f"}
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.394110 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-nplg6"
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.411005 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-r6ncj" event={"ID":"2f18da50-4c56-4f61-bcf4-583bec642127","Type":"ContainerStarted","Data":"77d1e0294790faa791eb3aa08979624caa24108de7f4f2100a7c99677e603f69"}
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.414483 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rfct2" podStartSLOduration=123.41444385 podStartE2EDuration="2m3.41444385s" podCreationTimestamp="2025-12-02 18:36:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:38:32.397632008 +0000 UTC m=+143.170524336" watchObservedRunningTime="2025-12-02 18:38:32.41444385 +0000 UTC m=+143.187336178"
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.439477 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-c85tx"
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.442380 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bjntn" event={"ID":"d4f401bf-9a39-4f1b-a24f-9f5db5a36e40","Type":"ContainerStarted","Data":"0c22c89d499b2ccdf2f8374a4ff05f7ec55f4dfe82190b03f8056c07b8d19c20"}
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.453276 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.453670 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8bad1c4d-9724-410d-8651-0a8c7c1d92b3-catalog-content\") pod \"redhat-operators-x7tgb\" (UID: \"8bad1c4d-9724-410d-8651-0a8c7c1d92b3\") " pod="openshift-marketplace/redhat-operators-x7tgb"
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.453760 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vrh79\" (UniqueName: \"kubernetes.io/projected/8bad1c4d-9724-410d-8651-0a8c7c1d92b3-kube-api-access-vrh79\") pod \"redhat-operators-x7tgb\" (UID: \"8bad1c4d-9724-410d-8651-0a8c7c1d92b3\") " pod="openshift-marketplace/redhat-operators-x7tgb"
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.453827 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8bad1c4d-9724-410d-8651-0a8c7c1d92b3-utilities\") pod \"redhat-operators-x7tgb\" (UID: \"8bad1c4d-9724-410d-8651-0a8c7c1d92b3\") " pod="openshift-marketplace/redhat-operators-x7tgb"
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.454255 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8bad1c4d-9724-410d-8651-0a8c7c1d92b3-utilities\") pod \"redhat-operators-x7tgb\" (UID: \"8bad1c4d-9724-410d-8651-0a8c7c1d92b3\") " pod="openshift-marketplace/redhat-operators-x7tgb"
Dec 02 18:38:32 crc kubenswrapper[4792]: E1202 18:38:32.454988 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:32.954970191 +0000 UTC m=+143.727862519 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.456243 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8bad1c4d-9724-410d-8651-0a8c7c1d92b3-catalog-content\") pod \"redhat-operators-x7tgb\" (UID: \"8bad1c4d-9724-410d-8651-0a8c7c1d92b3\") " pod="openshift-marketplace/redhat-operators-x7tgb"
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.492764 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4kkfv" event={"ID":"a2745526-4b6e-4ccb-82d9-106a0bf83b74","Type":"ContainerStarted","Data":"7815ec0c73e128ddeb495ac27451518cfc296f7d2af291a8134545d730c9b10e"}
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.495468 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-cds6s" podStartSLOduration=124.49544239 podStartE2EDuration="2m4.49544239s" podCreationTimestamp="2025-12-02 18:36:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:38:32.486334445 +0000 UTC m=+143.259226763" watchObservedRunningTime="2025-12-02 18:38:32.49544239 +0000 UTC m=+143.268334728"
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.501247 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hrlnq"]
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.519885 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vrh79\" (UniqueName: \"kubernetes.io/projected/8bad1c4d-9724-410d-8651-0a8c7c1d92b3-kube-api-access-vrh79\") pod \"redhat-operators-x7tgb\" (UID: \"8bad1c4d-9724-410d-8651-0a8c7c1d92b3\") " pod="openshift-marketplace/redhat-operators-x7tgb"
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.550587 4792 patch_prober.go:28] interesting pod/router-default-5444994796-d9jsl container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 02 18:38:32 crc kubenswrapper[4792]: [-]has-synced failed: reason withheld
Dec 02 18:38:32 crc kubenswrapper[4792]: [+]process-running ok
Dec 02 18:38:32 crc kubenswrapper[4792]: healthz check failed
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.550643 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-d9jsl" podUID="05a76a9b-9018-4c8c-a2c7-a83641ca60b7" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.551184 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-khg9t" event={"ID":"e690e93c-92e0-4045-88b5-685763786d3a","Type":"ContainerStarted","Data":"7188347be881fd5302e80cba6f9ec1278060d262c45f53356b1c41e294e1e66a"}
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.551218 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-khg9t" event={"ID":"e690e93c-92e0-4045-88b5-685763786d3a","Type":"ContainerStarted","Data":"0b4255b08334b67b357b15fd2d2dbf106becb0d859ebe140c90649f6c3cdfaf8"}
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.555455 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml"
Dec 02 18:38:32 crc kubenswrapper[4792]: E1202 18:38:32.556830 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:33.056817371 +0000 UTC m=+143.829709689 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.559046 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-x7tgb"
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.574042 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-r4dvq" podStartSLOduration=123.574021484 podStartE2EDuration="2m3.574021484s" podCreationTimestamp="2025-12-02 18:36:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:38:32.539026722 +0000 UTC m=+143.311919070" watchObservedRunningTime="2025-12-02 18:38:32.574021484 +0000 UTC m=+143.346913802"
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.594897 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ggtd2" event={"ID":"c0ecd9f5-ffc9-4a7a-8d6f-96781e7577ac","Type":"ContainerStarted","Data":"23f885ca330d5fc3e6aa810854344507854968e3efbd7cd246a7597c76009a78"}
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.596979 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ggtd2"
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.614788 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-zs9kz" podStartSLOduration=123.61475781 podStartE2EDuration="2m3.61475781s" podCreationTimestamp="2025-12-02 18:36:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:38:32.614584886 +0000 UTC m=+143.387477214" watchObservedRunningTime="2025-12-02 18:38:32.61475781 +0000 UTC m=+143.387650138"
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.626655 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411670-9n2pw" event={"ID":"64e0e779-21ef-489e-8721-54533e24bf31","Type":"ContainerStarted","Data":"de3a76cc713bc3cff53df24c8963afadeab33b153745e493a8423518f4bdf21e"}
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.653898 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-nzgzn" event={"ID":"ab249412-8c3a-4e5f-b84a-374b19cc1dc9","Type":"ContainerStarted","Data":"a145eade2c7870a50a2ded2d47f9e134150e97e6cd1af473f97c07ccfeb00b9d"}
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.655458 4792 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-nzgzn container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/healthz\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body=
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.655514 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-nzgzn" podUID="ab249412-8c3a-4e5f-b84a-374b19cc1dc9" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.15:8080/healthz\": dial tcp 10.217.0.15:8080: connect: connection refused"
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.656561 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 18:38:32 crc kubenswrapper[4792]: E1202 18:38:32.657889 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:33.15787168 +0000 UTC m=+143.930764008 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.678030 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qrcm2" event={"ID":"0ecd7ddf-ad6e-4f71-8d80-5626f4830547","Type":"ContainerStarted","Data":"cbd9fb99d7ae0b1c9605a22aa996710599309566b44a950c6cbee5275a5199e4"}
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.706377 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bjntn" podStartSLOduration=123.706361225 podStartE2EDuration="2m3.706361225s" podCreationTimestamp="2025-12-02 18:36:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:38:32.703898249 +0000 UTC m=+143.476790577" watchObservedRunningTime="2025-12-02 18:38:32.706361225 +0000 UTC m=+143.479253553"
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.714828 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7txmt" event={"ID":"805371d5-27c1-4895-aab2-23e8e1d9a91b","Type":"ContainerStarted","Data":"16a0a2a5b6089527e6b74f86c8a4f77a6208f608c88dc50189e4a30454a63f41"}
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.752717 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-z9h9q" event={"ID":"67b8fcaa-f575-4b23-8dbe-9c2402abba3d","Type":"ContainerStarted","Data":"3326a56025ffc2f3e33aede240d8c7dcad440c67a538ca72b3eb7f631b99f8cc"}
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.753075 4792 patch_prober.go:28] interesting pod/downloads-7954f5f757-jcpdc container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body=
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.753109 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-z9h9q"
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.753133 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-jcpdc" podUID="cff41058-c666-4dc1-a119-54f157de50b9" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused"
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.758625 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml"
Dec 02 18:38:32 crc kubenswrapper[4792]: E1202 18:38:32.766078 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:33.266051811 +0000 UTC m=+144.038944139 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.771029 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7txmt"
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.774988 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-hgblz"
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.815558 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-khg9t" podStartSLOduration=123.815396619 podStartE2EDuration="2m3.815396619s" podCreationTimestamp="2025-12-02 18:36:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:38:32.746457864 +0000 UTC m=+143.519350192" watchObservedRunningTime="2025-12-02 18:38:32.815396619 +0000 UTC m=+143.588288947"
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.816271 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ggtd2" podStartSLOduration=123.816266462 podStartE2EDuration="2m3.816266462s" podCreationTimestamp="2025-12-02 18:36:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:38:32.815647096 +0000 UTC m=+143.588539444" watchObservedRunningTime="2025-12-02 18:38:32.816266462 +0000 UTC m=+143.589158790"
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.859569 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 18:38:32 crc kubenswrapper[4792]: E1202 18:38:32.863073 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:33.363054051 +0000 UTC m=+144.135946379 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.963826 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-nplg6" podStartSLOduration=124.963802742 podStartE2EDuration="2m4.963802742s" podCreationTimestamp="2025-12-02 18:36:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:38:32.857328357 +0000 UTC m=+143.630220695" watchObservedRunningTime="2025-12-02 18:38:32.963802742 +0000 UTC m=+143.736695080"
Dec 02 18:38:32 crc kubenswrapper[4792]: I1202 18:38:32.965459 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml"
Dec 02 18:38:32 crc kubenswrapper[4792]: E1202 18:38:32.965956 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:33.46593915 +0000 UTC m=+144.238831478 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 18:38:33 crc kubenswrapper[4792]: I1202 18:38:33.054076 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ggtd2"
Dec 02 18:38:33 crc kubenswrapper[4792]: I1202 18:38:33.071096 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 18:38:33 crc kubenswrapper[4792]: E1202 18:38:33.071552 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:33.571535501 +0000 UTC m=+144.344427829 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 18:38:33 crc kubenswrapper[4792]: I1202 18:38:33.107293 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-f89mv" podStartSLOduration=124.107274693 podStartE2EDuration="2m4.107274693s" podCreationTimestamp="2025-12-02 18:36:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:38:33.045580193 +0000 UTC m=+143.818472521" watchObservedRunningTime="2025-12-02 18:38:33.107274693 +0000 UTC m=+143.880167021"
Dec 02 18:38:33 crc kubenswrapper[4792]: I1202 18:38:33.109419 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-72k72" podStartSLOduration=124.10940976 podStartE2EDuration="2m4.10940976s" podCreationTimestamp="2025-12-02 18:36:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:38:33.107878739 +0000 UTC m=+143.880771067" watchObservedRunningTime="2025-12-02 18:38:33.10940976 +0000 UTC m=+143.882302088"
Dec 02 18:38:33 crc kubenswrapper[4792]: I1202 18:38:33.172670 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml"
Dec 02 18:38:33 crc kubenswrapper[4792]: E1202 18:38:33.173064 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:33.673052493 +0000 UTC m=+144.445944811 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 18:38:33 crc kubenswrapper[4792]: I1202 18:38:33.196883 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29411670-9n2pw" podStartSLOduration=125.196857463 podStartE2EDuration="2m5.196857463s" podCreationTimestamp="2025-12-02 18:36:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:38:33.169374894 +0000 UTC m=+143.942267212" watchObservedRunningTime="2025-12-02 18:38:33.196857463 +0000 UTC m=+143.969749791"
Dec 02 18:38:33 crc kubenswrapper[4792]: I1202 18:38:33.199069 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-b92km"]
Dec 02 18:38:33 crc kubenswrapper[4792]: I1202 18:38:33.275455 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 18:38:33 crc kubenswrapper[4792]: E1202 18:38:33.276260 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:33.776239399 +0000 UTC m=+144.549131727 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 18:38:33 crc kubenswrapper[4792]: I1202 18:38:33.377085 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml"
Dec 02 18:38:33 crc kubenswrapper[4792]: E1202 18:38:33.377514 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:33.877501844 +0000 UTC m=+144.650394162 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 18:38:33 crc kubenswrapper[4792]: I1202 18:38:33.481251 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 18:38:33 crc kubenswrapper[4792]: E1202 18:38:33.481715 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:33.981689148 +0000 UTC m=+144.754581466 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 18:38:33 crc kubenswrapper[4792]: I1202 18:38:33.481944 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml"
Dec 02 18:38:33 crc kubenswrapper[4792]: E1202 18:38:33.482394 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:33.982386456 +0000 UTC m=+144.755278784 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 18:38:33 crc kubenswrapper[4792]: I1202 18:38:33.538313 4792 patch_prober.go:28] interesting pod/router-default-5444994796-d9jsl container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 02 18:38:33 crc kubenswrapper[4792]: [-]has-synced failed: reason withheld
Dec 02 18:38:33 crc kubenswrapper[4792]: [+]process-running ok
Dec 02 18:38:33 crc kubenswrapper[4792]: healthz check failed
Dec 02 18:38:33 crc kubenswrapper[4792]: I1202 18:38:33.538369 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-d9jsl" podUID="05a76a9b-9018-4c8c-a2c7-a83641ca60b7" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 02 18:38:33 crc kubenswrapper[4792]: I1202 18:38:33.578048 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-x7tgb"]
Dec 02 18:38:33 crc kubenswrapper[4792]: I1202 18:38:33.583182 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 18:38:33 crc kubenswrapper[4792]: E1202 18:38:33.588496 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:34.088476961 +0000 UTC m=+144.861369279 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 18:38:33 crc kubenswrapper[4792]: I1202 18:38:33.600957 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-z9h9q" podStartSLOduration=10.600939646 podStartE2EDuration="10.600939646s" podCreationTimestamp="2025-12-02 18:38:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:38:33.59997995 +0000 UTC m=+144.372872288" watchObservedRunningTime="2025-12-02 18:38:33.600939646 +0000 UTC m=+144.373831974"
Dec 02 18:38:33 crc kubenswrapper[4792]: I1202 18:38:33.609935 4792 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock"
Dec 02 18:38:33 crc kubenswrapper[4792]: I1202 18:38:33.689819 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml"
Dec 02 18:38:33 crc kubenswrapper[4792]: E1202 18:38:33.690357 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:34.190341162 +0000 UTC m=+144.963233490 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 18:38:33 crc kubenswrapper[4792]: I1202 18:38:33.711603 4792 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-02T18:38:33.609967019Z","Handler":null,"Name":""}
Dec 02 18:38:33 crc kubenswrapper[4792]: I1202 18:38:33.745669 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-c85tx"]
Dec 02 18:38:33 crc kubenswrapper[4792]: I1202 18:38:33.788324 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-zs9kz" event={"ID":"788d1186-ade1-4f2c-acaa-e0030baa277c","Type":"ContainerStarted","Data":"dda1eaa4f622d6f4bd925906c0a33f39ea04a688f91a7a7c3fc413e10046584d"}
Dec 02 18:38:33 crc kubenswrapper[4792]: I1202 18:38:33.793079 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 18:38:33 crc kubenswrapper[4792]: E1202 18:38:33.793597 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:34.29358085 +0000 UTC m=+145.066473178 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 18:38:33 crc kubenswrapper[4792]: I1202 18:38:33.826261 4792 generic.go:334] "Generic (PLEG): container finished" podID="a2745526-4b6e-4ccb-82d9-106a0bf83b74" containerID="da5461fc8187e03154fd11b2842d2b9ad060b77d37ba7a748523801a71629168" exitCode=0
Dec 02 18:38:33 crc kubenswrapper[4792]: I1202 18:38:33.826369 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4kkfv" event={"ID":"a2745526-4b6e-4ccb-82d9-106a0bf83b74","Type":"ContainerDied","Data":"da5461fc8187e03154fd11b2842d2b9ad060b77d37ba7a748523801a71629168"}
Dec 02 18:38:33 crc kubenswrapper[4792]: I1202 18:38:33.857804 4792 generic.go:334] "Generic (PLEG): container finished" podID="c118dcaa-7c35-4a91-9b16-b3796e95fa86" containerID="98f108e6b246309ffacd7f673483a7af7e07f7b377c87a1ff537e1a180e69fde" exitCode=0
Dec 02 18:38:33 crc kubenswrapper[4792]: I1202 18:38:33.858356 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hrlnq" event={"ID":"c118dcaa-7c35-4a91-9b16-b3796e95fa86","Type":"ContainerDied","Data":"98f108e6b246309ffacd7f673483a7af7e07f7b377c87a1ff537e1a180e69fde"}
Dec 02 18:38:33 crc kubenswrapper[4792]: I1202 18:38:33.858394 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hrlnq" event={"ID":"c118dcaa-7c35-4a91-9b16-b3796e95fa86","Type":"ContainerStarted","Data":"9d6bfb666b1b5efc1beac839c2ce935311f349c8ee335b2e4f1e91e77ebeb3d7"}
Dec 02 18:38:33 crc kubenswrapper[4792]: I1202 18:38:33.876084 4792 generic.go:334] "Generic (PLEG): container finished" podID="a691800f-1f6a-4e86-b97a-73d8181b39d5" containerID="094c535f25a70531258532f0c0e6b5e14e046e51f2bda27da98e7fe4bef316ab" exitCode=0
Dec 02 18:38:33 crc kubenswrapper[4792]: I1202 18:38:33.876199 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b92km" event={"ID":"a691800f-1f6a-4e86-b97a-73d8181b39d5","Type":"ContainerDied","Data":"094c535f25a70531258532f0c0e6b5e14e046e51f2bda27da98e7fe4bef316ab"}
Dec 02 18:38:33 crc kubenswrapper[4792]: I1202 18:38:33.876228 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b92km" event={"ID":"a691800f-1f6a-4e86-b97a-73d8181b39d5","Type":"ContainerStarted","Data":"6775bc37b8dccdd1e74048f6e56472b424be49ea7c164876bfe33176727ad2cf"}
Dec 02 18:38:33 crc kubenswrapper[4792]: I1202 18:38:33.894580 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml"
Dec 02 18:38:33 crc kubenswrapper[4792]: E1202 18:38:33.895425 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:34.395403 +0000 UTC m=+145.168295328 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 18:38:33 crc kubenswrapper[4792]: I1202 18:38:33.900310 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-r6ncj" event={"ID":"2f18da50-4c56-4f61-bcf4-583bec642127","Type":"ContainerStarted","Data":"ef6a86990af739574d6e76b536f5c8f959156b7914505b5582b2b21397199313"}
Dec 02 18:38:33 crc kubenswrapper[4792]: I1202 18:38:33.900373 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-r6ncj" event={"ID":"2f18da50-4c56-4f61-bcf4-583bec642127","Type":"ContainerStarted","Data":"301778e1f391ee6be13b985e1b709ee8b4c74635ba3a32305ec6bf3e3131b72d"}
Dec 02 18:38:33 crc kubenswrapper[4792]: I1202 18:38:33.904390 4792 generic.go:334] "Generic (PLEG): container finished" podID="0ecd7ddf-ad6e-4f71-8d80-5626f4830547" containerID="5ada913068d4c5a7f20c63b435cd5ca40f0939da6fbc2635a605807efad16db9" exitCode=0
Dec 02 18:38:33 crc kubenswrapper[4792]: I1202 18:38:33.904479 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qrcm2" event={"ID":"0ecd7ddf-ad6e-4f71-8d80-5626f4830547","Type":"ContainerDied","Data":"5ada913068d4c5a7f20c63b435cd5ca40f0939da6fbc2635a605807efad16db9"}
Dec 02 18:38:33 crc kubenswrapper[4792]: I1202 18:38:33.921705 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x7tgb" event={"ID":"8bad1c4d-9724-410d-8651-0a8c7c1d92b3","Type":"ContainerStarted","Data":"364c5f70b78544589c3ca3ce547b2821f73aa738f03d5269673a6bcc1b89992e"}
Dec 02 18:38:33 crc kubenswrapper[4792]: I1202 18:38:33.987916 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-nzgzn"
Dec 02 18:38:34 crc kubenswrapper[4792]: I1202 18:38:33.997882 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 18:38:34 crc kubenswrapper[4792]: E1202 18:38:33.999136 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 18:38:34.49909752 +0000 UTC m=+145.271989848 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 18:38:34 crc kubenswrapper[4792]: I1202 18:38:34.002115 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml"
Dec 02 18:38:34 crc kubenswrapper[4792]: E1202 18:38:34.007225 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 18:38:34.507214318 +0000 UTC m=+145.280106646 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-nr8ml" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 18:38:34 crc kubenswrapper[4792]: I1202 18:38:34.021593 4792 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0
Dec 02 18:38:34 crc kubenswrapper[4792]: I1202 18:38:34.021650 4792 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock
Dec 02 18:38:34 crc kubenswrapper[4792]: I1202 18:38:34.146212 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 18:38:34 crc kubenswrapper[4792]: I1202 18:38:34.178033 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue ""
Dec 02 18:38:34 crc kubenswrapper[4792]: I1202 18:38:34.248156 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml"
Dec 02 18:38:34 crc kubenswrapper[4792]: I1202 18:38:34.299339 4792 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Dec 02 18:38:34 crc kubenswrapper[4792]: I1202 18:38:34.299414 4792 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml"
Dec 02 18:38:34 crc kubenswrapper[4792]: I1202 18:38:34.359462 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Dec 02 18:38:34 crc kubenswrapper[4792]: I1202 18:38:34.360294 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 02 18:38:34 crc kubenswrapper[4792]: I1202 18:38:34.362546 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-nr8ml\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml"
Dec 02 18:38:34 crc kubenswrapper[4792]: I1202 18:38:34.370308 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt"
Dec 02 18:38:34 crc kubenswrapper[4792]: I1202 18:38:34.370567 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n"
Dec 02 18:38:34 crc kubenswrapper[4792]: I1202 18:38:34.395969 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Dec 02 18:38:34 crc kubenswrapper[4792]: I1202 18:38:34.454537 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cff935f8-c210-467e-937b-94dc8990998e-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"cff935f8-c210-467e-937b-94dc8990998e\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 02 18:38:34 crc kubenswrapper[4792]: I1202 18:38:34.454722 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/cff935f8-c210-467e-937b-94dc8990998e-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"cff935f8-c210-467e-937b-94dc8990998e\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 02 18:38:34 crc kubenswrapper[4792]: I1202 18:38:34.478890 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml"
Dec 02 18:38:34 crc kubenswrapper[4792]: I1202 18:38:34.530376 4792 patch_prober.go:28] interesting pod/router-default-5444994796-d9jsl container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 02 18:38:34 crc kubenswrapper[4792]: [-]has-synced failed: reason withheld
Dec 02 18:38:34 crc kubenswrapper[4792]: [+]process-running ok
Dec 02 18:38:34 crc kubenswrapper[4792]: healthz check failed
Dec 02 18:38:34 crc kubenswrapper[4792]: I1202 18:38:34.530453 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-d9jsl" podUID="05a76a9b-9018-4c8c-a2c7-a83641ca60b7" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 02 18:38:34 crc kubenswrapper[4792]: I1202 18:38:34.556910 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cff935f8-c210-467e-937b-94dc8990998e-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"cff935f8-c210-467e-937b-94dc8990998e\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 02 18:38:34 crc kubenswrapper[4792]: I1202 18:38:34.556996 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/cff935f8-c210-467e-937b-94dc8990998e-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"cff935f8-c210-467e-937b-94dc8990998e\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 02 18:38:34 crc kubenswrapper[4792]: I1202 18:38:34.557059 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/cff935f8-c210-467e-937b-94dc8990998e-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"cff935f8-c210-467e-937b-94dc8990998e\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 02 18:38:34 crc kubenswrapper[4792]: I1202 18:38:34.600853 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cff935f8-c210-467e-937b-94dc8990998e-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"cff935f8-c210-467e-937b-94dc8990998e\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 02 18:38:34 crc kubenswrapper[4792]: I1202 18:38:34.697900 4792 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 02 18:38:34 crc kubenswrapper[4792]: I1202 18:38:34.946326 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-nr8ml"] Dec 02 18:38:34 crc kubenswrapper[4792]: I1202 18:38:34.955502 4792 generic.go:334] "Generic (PLEG): container finished" podID="715f891b-4e52-454b-b5c0-22694ef088e8" containerID="b9622c35979e0f7ebe46d73aaffb0f0f6b6d2d3a4d78258aea5d0555145c6d5f" exitCode=0 Dec 02 18:38:34 crc kubenswrapper[4792]: I1202 18:38:34.955956 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c85tx" event={"ID":"715f891b-4e52-454b-b5c0-22694ef088e8","Type":"ContainerDied","Data":"b9622c35979e0f7ebe46d73aaffb0f0f6b6d2d3a4d78258aea5d0555145c6d5f"} Dec 02 18:38:34 crc kubenswrapper[4792]: I1202 18:38:34.956385 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c85tx" event={"ID":"715f891b-4e52-454b-b5c0-22694ef088e8","Type":"ContainerStarted","Data":"c6cee850e07151adb91fbece71dc354728d6a16354f945fb539951933e2e498d"} Dec 02 18:38:35 crc kubenswrapper[4792]: I1202 18:38:35.008717 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-r6ncj" event={"ID":"2f18da50-4c56-4f61-bcf4-583bec642127","Type":"ContainerStarted","Data":"8480328a48e0c92c03f1fc2e370cb62370d0e2aca36efa02fd2374a1a8eb0f00"} Dec 02 18:38:35 crc kubenswrapper[4792]: I1202 18:38:35.020356 4792 generic.go:334] "Generic (PLEG): container finished" podID="8bad1c4d-9724-410d-8651-0a8c7c1d92b3" containerID="482a4d0ec46f163b9400f7bd7b2a125a9fe817f61065558a7d7b677b065b127c" exitCode=0 Dec 02 18:38:35 crc kubenswrapper[4792]: I1202 18:38:35.020708 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x7tgb" event={"ID":"8bad1c4d-9724-410d-8651-0a8c7c1d92b3","Type":"ContainerDied","Data":"482a4d0ec46f163b9400f7bd7b2a125a9fe817f61065558a7d7b677b065b127c"} Dec 02 18:38:35 crc kubenswrapper[4792]: I1202 18:38:35.043927 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-nplg6" Dec 02 18:38:35 crc kubenswrapper[4792]: I1202 18:38:35.053274 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-r6ncj" podStartSLOduration=12.053248014 podStartE2EDuration="12.053248014s" podCreationTimestamp="2025-12-02 18:38:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:38:35.043403089 +0000 UTC m=+145.816295427" watchObservedRunningTime="2025-12-02 18:38:35.053248014 +0000 UTC m=+145.826140362" Dec 02 18:38:35 crc kubenswrapper[4792]: I1202 18:38:35.113103 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 02 18:38:35 crc kubenswrapper[4792]: I1202 18:38:35.531276 4792 patch_prober.go:28] interesting pod/router-default-5444994796-d9jsl container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 18:38:35 crc kubenswrapper[4792]: [-]has-synced failed: reason withheld Dec 02 18:38:35 crc kubenswrapper[4792]: [+]process-running ok Dec 02 18:38:35 crc kubenswrapper[4792]: healthz check failed Dec 02 
18:38:35 crc kubenswrapper[4792]: I1202 18:38:35.531715 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-d9jsl" podUID="05a76a9b-9018-4c8c-a2c7-a83641ca60b7" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 18:38:35 crc kubenswrapper[4792]: I1202 18:38:35.570839 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Dec 02 18:38:35 crc kubenswrapper[4792]: I1202 18:38:35.722572 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j7k6j" Dec 02 18:38:35 crc kubenswrapper[4792]: I1202 18:38:35.722955 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j7k6j" Dec 02 18:38:35 crc kubenswrapper[4792]: I1202 18:38:35.730783 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j7k6j" Dec 02 18:38:35 crc kubenswrapper[4792]: I1202 18:38:35.851147 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-wh7rt" Dec 02 18:38:35 crc kubenswrapper[4792]: I1202 18:38:35.851207 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-wh7rt" Dec 02 18:38:35 crc kubenswrapper[4792]: I1202 18:38:35.858632 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-wh7rt" Dec 02 18:38:36 crc kubenswrapper[4792]: I1202 18:38:36.005273 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:38:36 crc kubenswrapper[4792]: I1202 18:38:36.005344 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:38:36 crc kubenswrapper[4792]: I1202 18:38:36.005406 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:38:36 crc kubenswrapper[4792]: I1202 18:38:36.005433 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:38:36 crc kubenswrapper[4792]: I1202 18:38:36.007738 4792 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:38:36 crc kubenswrapper[4792]: I1202 18:38:36.012161 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:38:36 crc kubenswrapper[4792]: I1202 18:38:36.012318 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:38:36 crc kubenswrapper[4792]: I1202 18:38:36.024227 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:38:36 crc kubenswrapper[4792]: I1202 18:38:36.040863 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" event={"ID":"c586142b-c192-4706-9026-bcf666e8f7c6","Type":"ContainerStarted","Data":"613de27f91fff11016d963e1ea569a21e5b50e07c0e5f9778b557f93a83f8c13"} Dec 02 18:38:36 crc kubenswrapper[4792]: I1202 18:38:36.040915 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" event={"ID":"c586142b-c192-4706-9026-bcf666e8f7c6","Type":"ContainerStarted","Data":"5511b3eb5519048f2c7f42af62cd3094851b16f10ef5b843ab87fa185737aaec"} Dec 02 18:38:36 crc kubenswrapper[4792]: I1202 18:38:36.041462 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:38:36 crc kubenswrapper[4792]: I1202 18:38:36.042908 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"cff935f8-c210-467e-937b-94dc8990998e","Type":"ContainerStarted","Data":"b0a10ac0ceb203db685405d31ea4ed84c70e4b6ed7783cfbba282ad0862ad486"} Dec 02 18:38:36 crc kubenswrapper[4792]: I1202 18:38:36.048496 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-wh7rt" Dec 02 18:38:36 crc kubenswrapper[4792]: I1202 18:38:36.050381 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j7k6j" Dec 02 18:38:36 crc kubenswrapper[4792]: I1202 18:38:36.114099 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" podStartSLOduration=128.114076379 podStartE2EDuration="2m8.114076379s" podCreationTimestamp="2025-12-02 18:36:28 +0000 UTC" firstStartedPulling="0001-01-01 
00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:38:36.064382092 +0000 UTC m=+146.837274440" watchObservedRunningTime="2025-12-02 18:38:36.114076379 +0000 UTC m=+146.886968707" Dec 02 18:38:36 crc kubenswrapper[4792]: I1202 18:38:36.189185 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:38:36 crc kubenswrapper[4792]: I1202 18:38:36.205678 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 18:38:36 crc kubenswrapper[4792]: I1202 18:38:36.222909 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 18:38:36 crc kubenswrapper[4792]: I1202 18:38:36.468995 4792 patch_prober.go:28] interesting pod/downloads-7954f5f757-jcpdc container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body= Dec 02 18:38:36 crc kubenswrapper[4792]: I1202 18:38:36.469453 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-jcpdc" podUID="cff41058-c666-4dc1-a119-54f157de50b9" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" Dec 02 18:38:36 crc kubenswrapper[4792]: I1202 18:38:36.469315 4792 patch_prober.go:28] interesting pod/downloads-7954f5f757-jcpdc container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body= Dec 02 18:38:36 crc kubenswrapper[4792]: I1202 18:38:36.469567 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-jcpdc" podUID="cff41058-c666-4dc1-a119-54f157de50b9" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" Dec 02 18:38:36 crc kubenswrapper[4792]: I1202 18:38:36.529640 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-d9jsl" Dec 02 18:38:36 crc kubenswrapper[4792]: I1202 18:38:36.534017 4792 patch_prober.go:28] interesting pod/router-default-5444994796-d9jsl container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 18:38:36 crc kubenswrapper[4792]: [-]has-synced failed: reason withheld Dec 02 18:38:36 crc kubenswrapper[4792]: [+]process-running ok Dec 02 18:38:36 crc kubenswrapper[4792]: healthz check failed Dec 02 18:38:36 crc kubenswrapper[4792]: I1202 18:38:36.534081 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-d9jsl" podUID="05a76a9b-9018-4c8c-a2c7-a83641ca60b7" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 18:38:36 crc kubenswrapper[4792]: I1202 18:38:36.759402 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-cds6s" Dec 02 18:38:36 crc kubenswrapper[4792]: I1202 18:38:36.759485 4792 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-cds6s" Dec 02 18:38:36 crc kubenswrapper[4792]: I1202 18:38:36.767778 4792 patch_prober.go:28] interesting pod/console-f9d7485db-cds6s container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.35:8443/health\": dial tcp 10.217.0.35:8443: connect: connection refused" start-of-body= Dec 02 18:38:36 crc kubenswrapper[4792]: I1202 18:38:36.768244 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-cds6s" podUID="95d5c817-e302-4f46-9db2-333b21486a7c" containerName="console" probeResult="failure" output="Get \"https://10.217.0.35:8443/health\": dial tcp 10.217.0.35:8443: connect: connection refused" Dec 02 18:38:36 crc kubenswrapper[4792]: W1202 18:38:36.957780 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-7a85f821e5740d99eb46bf055eccf4c406323c676bdb70beefa065c6681b4c28 WatchSource:0}: Error finding container 7a85f821e5740d99eb46bf055eccf4c406323c676bdb70beefa065c6681b4c28: Status 404 returned error can't find the container with id 7a85f821e5740d99eb46bf055eccf4c406323c676bdb70beefa065c6681b4c28 Dec 02 18:38:37 crc kubenswrapper[4792]: I1202 18:38:37.070355 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"9af34eb69fd86b459ff3251a1157d7e225034a3199ec4976fb2ae48a1f16278e"} Dec 02 18:38:37 crc kubenswrapper[4792]: I1202 18:38:37.089613 4792 generic.go:334] "Generic (PLEG): container finished" podID="cff935f8-c210-467e-937b-94dc8990998e" containerID="2a83ea304807100852f04d38991f8468bf6fc05f697074c5541b6d6aea0958dd" exitCode=0 Dec 02 18:38:37 crc kubenswrapper[4792]: I1202 18:38:37.089678 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"cff935f8-c210-467e-937b-94dc8990998e","Type":"ContainerDied","Data":"2a83ea304807100852f04d38991f8468bf6fc05f697074c5541b6d6aea0958dd"} Dec 02 18:38:37 crc kubenswrapper[4792]: I1202 18:38:37.098840 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"517101d23c67977cc36814208653b5531b86fb8bd1ee17b37b6e0c7fbb536fda"} Dec 02 18:38:37 crc kubenswrapper[4792]: I1202 18:38:37.098892 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"7a759bdf25eec31ce3b1a26237e1a729ba0e0c321b417f05430a75bbde644a4e"} Dec 02 18:38:37 crc kubenswrapper[4792]: I1202 18:38:37.099659 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:38:37 crc kubenswrapper[4792]: I1202 18:38:37.110132 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"7a85f821e5740d99eb46bf055eccf4c406323c676bdb70beefa065c6681b4c28"} Dec 02 18:38:37 crc kubenswrapper[4792]: I1202 18:38:37.530639 4792 patch_prober.go:28] 
interesting pod/router-default-5444994796-d9jsl container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 18:38:37 crc kubenswrapper[4792]: [-]has-synced failed: reason withheld Dec 02 18:38:37 crc kubenswrapper[4792]: [+]process-running ok Dec 02 18:38:37 crc kubenswrapper[4792]: healthz check failed Dec 02 18:38:37 crc kubenswrapper[4792]: I1202 18:38:37.531032 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-d9jsl" podUID="05a76a9b-9018-4c8c-a2c7-a83641ca60b7" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 18:38:38 crc kubenswrapper[4792]: I1202 18:38:38.081891 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 18:38:38 crc kubenswrapper[4792]: I1202 18:38:38.081984 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 18:38:38 crc kubenswrapper[4792]: I1202 18:38:38.188343 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"fbbb414243710097f26367b72bb1de78c97ddba4e114da549a4fab59d5f73f53"} Dec 02 18:38:38 crc kubenswrapper[4792]: I1202 18:38:38.193213 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"ca2f2eebc264009f6081bb6d959088d4ac4840e98e5ba4deaa8c2ab2efbe334c"} Dec 02 18:38:38 crc kubenswrapper[4792]: I1202 18:38:38.532861 4792 patch_prober.go:28] interesting pod/router-default-5444994796-d9jsl container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 18:38:38 crc kubenswrapper[4792]: [-]has-synced failed: reason withheld Dec 02 18:38:38 crc kubenswrapper[4792]: [+]process-running ok Dec 02 18:38:38 crc kubenswrapper[4792]: healthz check failed Dec 02 18:38:38 crc kubenswrapper[4792]: I1202 18:38:38.533330 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-d9jsl" podUID="05a76a9b-9018-4c8c-a2c7-a83641ca60b7" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 18:38:38 crc kubenswrapper[4792]: I1202 18:38:38.589208 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 02 18:38:38 crc kubenswrapper[4792]: I1202 18:38:38.590343 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 02 18:38:38 crc kubenswrapper[4792]: I1202 18:38:38.590419 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 18:38:38 crc kubenswrapper[4792]: I1202 18:38:38.594066 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 02 18:38:38 crc kubenswrapper[4792]: I1202 18:38:38.595252 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 02 18:38:38 crc kubenswrapper[4792]: I1202 18:38:38.596959 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 02 18:38:38 crc kubenswrapper[4792]: I1202 18:38:38.675794 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cff935f8-c210-467e-937b-94dc8990998e-kube-api-access\") pod \"cff935f8-c210-467e-937b-94dc8990998e\" (UID: \"cff935f8-c210-467e-937b-94dc8990998e\") " Dec 02 18:38:38 crc kubenswrapper[4792]: I1202 18:38:38.675855 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/cff935f8-c210-467e-937b-94dc8990998e-kubelet-dir\") pod \"cff935f8-c210-467e-937b-94dc8990998e\" (UID: \"cff935f8-c210-467e-937b-94dc8990998e\") " Dec 02 18:38:38 crc kubenswrapper[4792]: I1202 18:38:38.676125 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/47e877ea-e045-4885-a7e0-da47996581b3-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"47e877ea-e045-4885-a7e0-da47996581b3\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 18:38:38 crc kubenswrapper[4792]: I1202 18:38:38.676167 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/47e877ea-e045-4885-a7e0-da47996581b3-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"47e877ea-e045-4885-a7e0-da47996581b3\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 18:38:38 crc kubenswrapper[4792]: I1202 18:38:38.676326 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cff935f8-c210-467e-937b-94dc8990998e-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "cff935f8-c210-467e-937b-94dc8990998e" (UID: "cff935f8-c210-467e-937b-94dc8990998e"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 18:38:38 crc kubenswrapper[4792]: I1202 18:38:38.682112 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cff935f8-c210-467e-937b-94dc8990998e-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "cff935f8-c210-467e-937b-94dc8990998e" (UID: "cff935f8-c210-467e-937b-94dc8990998e"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:38:38 crc kubenswrapper[4792]: I1202 18:38:38.777924 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/47e877ea-e045-4885-a7e0-da47996581b3-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"47e877ea-e045-4885-a7e0-da47996581b3\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 18:38:38 crc kubenswrapper[4792]: I1202 18:38:38.777998 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/47e877ea-e045-4885-a7e0-da47996581b3-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"47e877ea-e045-4885-a7e0-da47996581b3\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 18:38:38 crc kubenswrapper[4792]: I1202 18:38:38.778113 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cff935f8-c210-467e-937b-94dc8990998e-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 18:38:38 crc kubenswrapper[4792]: I1202 18:38:38.778127 4792 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/cff935f8-c210-467e-937b-94dc8990998e-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 02 18:38:38 crc kubenswrapper[4792]: I1202 18:38:38.778185 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/47e877ea-e045-4885-a7e0-da47996581b3-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"47e877ea-e045-4885-a7e0-da47996581b3\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 18:38:38 crc kubenswrapper[4792]: I1202 18:38:38.800219 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/47e877ea-e045-4885-a7e0-da47996581b3-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"47e877ea-e045-4885-a7e0-da47996581b3\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 18:38:38 crc kubenswrapper[4792]: I1202 18:38:38.919410 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 18:38:39 crc kubenswrapper[4792]: I1202 18:38:39.224619 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"cff935f8-c210-467e-937b-94dc8990998e","Type":"ContainerDied","Data":"b0a10ac0ceb203db685405d31ea4ed84c70e4b6ed7783cfbba282ad0862ad486"} Dec 02 18:38:39 crc kubenswrapper[4792]: I1202 18:38:39.224663 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b0a10ac0ceb203db685405d31ea4ed84c70e4b6ed7783cfbba282ad0862ad486" Dec 02 18:38:39 crc kubenswrapper[4792]: I1202 18:38:39.224763 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 02 18:38:39 crc kubenswrapper[4792]: I1202 18:38:39.285550 4792 generic.go:334] "Generic (PLEG): container finished" podID="64e0e779-21ef-489e-8721-54533e24bf31" containerID="de3a76cc713bc3cff53df24c8963afadeab33b153745e493a8423518f4bdf21e" exitCode=0 Dec 02 18:38:39 crc kubenswrapper[4792]: I1202 18:38:39.285515 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411670-9n2pw" event={"ID":"64e0e779-21ef-489e-8721-54533e24bf31","Type":"ContainerDied","Data":"de3a76cc713bc3cff53df24c8963afadeab33b153745e493a8423518f4bdf21e"} Dec 02 18:38:39 crc kubenswrapper[4792]: I1202 18:38:39.360509 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 02 18:38:39 crc kubenswrapper[4792]: I1202 18:38:39.532670 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-d9jsl" Dec 02 18:38:39 crc kubenswrapper[4792]: I1202 18:38:39.538093 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-d9jsl" Dec 02 18:38:40 crc kubenswrapper[4792]: I1202 18:38:40.333049 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"47e877ea-e045-4885-a7e0-da47996581b3","Type":"ContainerStarted","Data":"b892e37794b1324a2f7421166375c7ced75c9d504a6702e8beace3555f9130c1"} Dec 02 18:38:40 crc kubenswrapper[4792]: I1202 18:38:40.725425 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411670-9n2pw" Dec 02 18:38:40 crc kubenswrapper[4792]: I1202 18:38:40.819186 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/64e0e779-21ef-489e-8721-54533e24bf31-config-volume\") pod \"64e0e779-21ef-489e-8721-54533e24bf31\" (UID: \"64e0e779-21ef-489e-8721-54533e24bf31\") " Dec 02 18:38:40 crc kubenswrapper[4792]: I1202 18:38:40.819560 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/64e0e779-21ef-489e-8721-54533e24bf31-secret-volume\") pod \"64e0e779-21ef-489e-8721-54533e24bf31\" (UID: \"64e0e779-21ef-489e-8721-54533e24bf31\") " Dec 02 18:38:40 crc kubenswrapper[4792]: I1202 18:38:40.819712 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mjclj\" (UniqueName: \"kubernetes.io/projected/64e0e779-21ef-489e-8721-54533e24bf31-kube-api-access-mjclj\") pod \"64e0e779-21ef-489e-8721-54533e24bf31\" (UID: \"64e0e779-21ef-489e-8721-54533e24bf31\") " Dec 02 18:38:40 crc kubenswrapper[4792]: I1202 18:38:40.820888 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64e0e779-21ef-489e-8721-54533e24bf31-config-volume" (OuterVolumeSpecName: "config-volume") pod "64e0e779-21ef-489e-8721-54533e24bf31" (UID: "64e0e779-21ef-489e-8721-54533e24bf31"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:38:40 crc kubenswrapper[4792]: I1202 18:38:40.826600 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64e0e779-21ef-489e-8721-54533e24bf31-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "64e0e779-21ef-489e-8721-54533e24bf31" (UID: "64e0e779-21ef-489e-8721-54533e24bf31"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:38:40 crc kubenswrapper[4792]: I1202 18:38:40.830471 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64e0e779-21ef-489e-8721-54533e24bf31-kube-api-access-mjclj" (OuterVolumeSpecName: "kube-api-access-mjclj") pod "64e0e779-21ef-489e-8721-54533e24bf31" (UID: "64e0e779-21ef-489e-8721-54533e24bf31"). InnerVolumeSpecName "kube-api-access-mjclj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:38:40 crc kubenswrapper[4792]: I1202 18:38:40.921024 4792 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/64e0e779-21ef-489e-8721-54533e24bf31-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 02 18:38:40 crc kubenswrapper[4792]: I1202 18:38:40.921064 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mjclj\" (UniqueName: \"kubernetes.io/projected/64e0e779-21ef-489e-8721-54533e24bf31-kube-api-access-mjclj\") on node \"crc\" DevicePath \"\"" Dec 02 18:38:40 crc kubenswrapper[4792]: I1202 18:38:40.921076 4792 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/64e0e779-21ef-489e-8721-54533e24bf31-config-volume\") on node \"crc\" DevicePath \"\"" Dec 02 18:38:41 crc kubenswrapper[4792]: I1202 18:38:41.334029 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-z9h9q" Dec 02 18:38:41 crc kubenswrapper[4792]: I1202 18:38:41.377774 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"47e877ea-e045-4885-a7e0-da47996581b3","Type":"ContainerStarted","Data":"a57c80886b44f29910d56022fae5ddefffc5bf2cd2ac98f26e4cc9c0c6d81718"} Dec 02 18:38:41 crc kubenswrapper[4792]: I1202 18:38:41.388804 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411670-9n2pw" event={"ID":"64e0e779-21ef-489e-8721-54533e24bf31","Type":"ContainerDied","Data":"9036ab47a42d35be93ca40cdb8543c12f9d29f923ee09c16cf16b66ee6c0b2f8"} Dec 02 18:38:41 crc kubenswrapper[4792]: I1202 18:38:41.388859 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9036ab47a42d35be93ca40cdb8543c12f9d29f923ee09c16cf16b66ee6c0b2f8" Dec 02 18:38:41 crc kubenswrapper[4792]: I1202 18:38:41.388976 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411670-9n2pw" Dec 02 18:38:41 crc kubenswrapper[4792]: I1202 18:38:41.402326 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=3.402303543 podStartE2EDuration="3.402303543s" podCreationTimestamp="2025-12-02 18:38:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:38:41.399003444 +0000 UTC m=+152.171895762" watchObservedRunningTime="2025-12-02 18:38:41.402303543 +0000 UTC m=+152.175195871" Dec 02 18:38:42 crc kubenswrapper[4792]: I1202 18:38:42.412615 4792 generic.go:334] "Generic (PLEG): container finished" podID="47e877ea-e045-4885-a7e0-da47996581b3" containerID="a57c80886b44f29910d56022fae5ddefffc5bf2cd2ac98f26e4cc9c0c6d81718" exitCode=0 Dec 02 18:38:42 crc kubenswrapper[4792]: I1202 18:38:42.412984 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"47e877ea-e045-4885-a7e0-da47996581b3","Type":"ContainerDied","Data":"a57c80886b44f29910d56022fae5ddefffc5bf2cd2ac98f26e4cc9c0c6d81718"} Dec 02 18:38:46 crc kubenswrapper[4792]: I1202 18:38:46.474367 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-jcpdc" Dec 02 18:38:46 crc kubenswrapper[4792]: I1202 18:38:46.765756 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-cds6s" Dec 02 18:38:46 crc kubenswrapper[4792]: I1202 18:38:46.769865 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-cds6s" Dec 02 18:38:51 crc kubenswrapper[4792]: I1202 18:38:51.705994 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/778806a7-7e6f-4776-8233-b42b296ebc52-metrics-certs\") pod \"network-metrics-daemon-2ls4m\" (UID: \"778806a7-7e6f-4776-8233-b42b296ebc52\") " pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:38:51 crc kubenswrapper[4792]: I1202 18:38:51.716353 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/778806a7-7e6f-4776-8233-b42b296ebc52-metrics-certs\") pod \"network-metrics-daemon-2ls4m\" (UID: \"778806a7-7e6f-4776-8233-b42b296ebc52\") " pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:38:51 crc kubenswrapper[4792]: I1202 18:38:51.770107 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-2ls4m" Dec 02 18:38:52 crc kubenswrapper[4792]: I1202 18:38:52.889194 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 18:38:53 crc kubenswrapper[4792]: I1202 18:38:53.063349 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/47e877ea-e045-4885-a7e0-da47996581b3-kubelet-dir\") pod \"47e877ea-e045-4885-a7e0-da47996581b3\" (UID: \"47e877ea-e045-4885-a7e0-da47996581b3\") " Dec 02 18:38:53 crc kubenswrapper[4792]: I1202 18:38:53.063625 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/47e877ea-e045-4885-a7e0-da47996581b3-kube-api-access\") pod \"47e877ea-e045-4885-a7e0-da47996581b3\" (UID: \"47e877ea-e045-4885-a7e0-da47996581b3\") " Dec 02 18:38:53 crc kubenswrapper[4792]: I1202 18:38:53.063619 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/47e877ea-e045-4885-a7e0-da47996581b3-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "47e877ea-e045-4885-a7e0-da47996581b3" (UID: "47e877ea-e045-4885-a7e0-da47996581b3"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 18:38:53 crc kubenswrapper[4792]: I1202 18:38:53.064807 4792 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/47e877ea-e045-4885-a7e0-da47996581b3-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 02 18:38:53 crc kubenswrapper[4792]: I1202 18:38:53.070542 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47e877ea-e045-4885-a7e0-da47996581b3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "47e877ea-e045-4885-a7e0-da47996581b3" (UID: "47e877ea-e045-4885-a7e0-da47996581b3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:38:53 crc kubenswrapper[4792]: I1202 18:38:53.165631 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/47e877ea-e045-4885-a7e0-da47996581b3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 18:38:53 crc kubenswrapper[4792]: I1202 18:38:53.513665 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"47e877ea-e045-4885-a7e0-da47996581b3","Type":"ContainerDied","Data":"b892e37794b1324a2f7421166375c7ced75c9d504a6702e8beace3555f9130c1"} Dec 02 18:38:53 crc kubenswrapper[4792]: I1202 18:38:53.513722 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b892e37794b1324a2f7421166375c7ced75c9d504a6702e8beace3555f9130c1" Dec 02 18:38:53 crc kubenswrapper[4792]: I1202 18:38:53.513787 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 18:38:54 crc kubenswrapper[4792]: I1202 18:38:54.494870 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:39:06 crc kubenswrapper[4792]: I1202 18:39:06.532302 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 18:39:06 crc kubenswrapper[4792]: I1202 18:39:06.587373 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rfct2" Dec 02 18:39:08 crc kubenswrapper[4792]: I1202 18:39:08.081418 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 18:39:08 crc kubenswrapper[4792]: I1202 18:39:08.081497 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 18:39:09 crc kubenswrapper[4792]: E1202 18:39:09.298933 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 02 18:39:09 crc kubenswrapper[4792]: E1202 18:39:09.300243 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qbtjp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-p6vhp_openshift-marketplace(bba4322e-397d-4b6a-b52c-14dfeecbf071): ErrImagePull: rpc error: code = 
Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 18:39:09 crc kubenswrapper[4792]: E1202 18:39:09.301598 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-p6vhp" podUID="bba4322e-397d-4b6a-b52c-14dfeecbf071" Dec 02 18:39:13 crc kubenswrapper[4792]: I1202 18:39:13.377180 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 02 18:39:13 crc kubenswrapper[4792]: E1202 18:39:13.378799 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cff935f8-c210-467e-937b-94dc8990998e" containerName="pruner" Dec 02 18:39:13 crc kubenswrapper[4792]: I1202 18:39:13.378836 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="cff935f8-c210-467e-937b-94dc8990998e" containerName="pruner" Dec 02 18:39:13 crc kubenswrapper[4792]: E1202 18:39:13.378869 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47e877ea-e045-4885-a7e0-da47996581b3" containerName="pruner" Dec 02 18:39:13 crc kubenswrapper[4792]: I1202 18:39:13.378885 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="47e877ea-e045-4885-a7e0-da47996581b3" containerName="pruner" Dec 02 18:39:13 crc kubenswrapper[4792]: E1202 18:39:13.378920 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64e0e779-21ef-489e-8721-54533e24bf31" containerName="collect-profiles" Dec 02 18:39:13 crc kubenswrapper[4792]: I1202 18:39:13.378997 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="64e0e779-21ef-489e-8721-54533e24bf31" containerName="collect-profiles" Dec 02 18:39:13 crc kubenswrapper[4792]: I1202 18:39:13.379246 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="47e877ea-e045-4885-a7e0-da47996581b3" containerName="pruner" Dec 02 18:39:13 crc kubenswrapper[4792]: I1202 18:39:13.379288 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="cff935f8-c210-467e-937b-94dc8990998e" containerName="pruner" Dec 02 18:39:13 crc kubenswrapper[4792]: I1202 18:39:13.379306 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="64e0e779-21ef-489e-8721-54533e24bf31" containerName="collect-profiles" Dec 02 18:39:13 crc kubenswrapper[4792]: I1202 18:39:13.383120 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 18:39:13 crc kubenswrapper[4792]: I1202 18:39:13.386915 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 02 18:39:13 crc kubenswrapper[4792]: I1202 18:39:13.387126 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 02 18:39:13 crc kubenswrapper[4792]: I1202 18:39:13.392583 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 02 18:39:13 crc kubenswrapper[4792]: I1202 18:39:13.520894 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e9a3ff99-516a-436e-920e-ef6eca8878e1-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"e9a3ff99-516a-436e-920e-ef6eca8878e1\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 18:39:13 crc kubenswrapper[4792]: I1202 18:39:13.521227 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e9a3ff99-516a-436e-920e-ef6eca8878e1-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"e9a3ff99-516a-436e-920e-ef6eca8878e1\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 18:39:13 crc kubenswrapper[4792]: I1202 18:39:13.622684 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e9a3ff99-516a-436e-920e-ef6eca8878e1-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"e9a3ff99-516a-436e-920e-ef6eca8878e1\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 18:39:13 crc kubenswrapper[4792]: I1202 18:39:13.622881 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e9a3ff99-516a-436e-920e-ef6eca8878e1-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"e9a3ff99-516a-436e-920e-ef6eca8878e1\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 18:39:13 crc kubenswrapper[4792]: I1202 18:39:13.623006 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e9a3ff99-516a-436e-920e-ef6eca8878e1-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"e9a3ff99-516a-436e-920e-ef6eca8878e1\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 18:39:13 crc kubenswrapper[4792]: I1202 18:39:13.651693 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e9a3ff99-516a-436e-920e-ef6eca8878e1-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"e9a3ff99-516a-436e-920e-ef6eca8878e1\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 18:39:13 crc kubenswrapper[4792]: I1202 18:39:13.713383 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 02 18:39:15 crc kubenswrapper[4792]: E1202 18:39:15.434873 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-p6vhp" podUID="bba4322e-397d-4b6a-b52c-14dfeecbf071"
Dec 02 18:39:15 crc kubenswrapper[4792]: E1202 18:39:15.532972 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18"
Dec 02 18:39:15 crc kubenswrapper[4792]: E1202 18:39:15.533672 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-27prc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-qrcm2_openshift-marketplace(0ecd7ddf-ad6e-4f71-8d80-5626f4830547): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 02 18:39:15 crc kubenswrapper[4792]: E1202 18:39:15.534936 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-qrcm2" podUID="0ecd7ddf-ad6e-4f71-8d80-5626f4830547"
Dec 02 18:39:16 crc kubenswrapper[4792]: E1202 18:39:16.898007 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-qrcm2" podUID="0ecd7ddf-ad6e-4f71-8d80-5626f4830547"
Dec 02 18:39:16 crc kubenswrapper[4792]: E1202 18:39:16.962182 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18"
Dec 02 18:39:16 crc kubenswrapper[4792]: E1202 18:39:16.963029 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-fbmnl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-b92km_openshift-marketplace(a691800f-1f6a-4e86-b97a-73d8181b39d5): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 02 18:39:16 crc kubenswrapper[4792]: E1202 18:39:16.964637 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-b92km" podUID="a691800f-1f6a-4e86-b97a-73d8181b39d5"
Dec 02 18:39:16 crc kubenswrapper[4792]: E1202 18:39:16.981096 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18"
Dec 02 18:39:16 crc kubenswrapper[4792]: E1202 18:39:16.981268 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dhrp7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-z4jbn_openshift-marketplace(77b66620-e883-40d8-8294-b6b4a2f3ad8c): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 02 18:39:16 crc kubenswrapper[4792]: E1202 18:39:16.982707 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-z4jbn" podUID="77b66620-e883-40d8-8294-b6b4a2f3ad8c"
Dec 02 18:39:16 crc kubenswrapper[4792]: E1202 18:39:16.994001 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18"
Dec 02 18:39:16 crc kubenswrapper[4792]: E1202 18:39:16.994705 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pqppw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-4kkfv_openshift-marketplace(a2745526-4b6e-4ccb-82d9-106a0bf83b74): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 02 18:39:16 crc kubenswrapper[4792]: E1202 18:39:16.995986 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-4kkfv" podUID="a2745526-4b6e-4ccb-82d9-106a0bf83b74"
Dec 02 18:39:17 crc kubenswrapper[4792]: E1202 18:39:17.020900 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18"
Dec 02 18:39:17 crc kubenswrapper[4792]: E1202 18:39:17.021103 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9ws67,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-hrlnq_openshift-marketplace(c118dcaa-7c35-4a91-9b16-b3796e95fa86): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 02 18:39:17 crc kubenswrapper[4792]: E1202 18:39:17.022698 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-hrlnq" podUID="c118dcaa-7c35-4a91-9b16-b3796e95fa86"
Dec 02 18:39:18 crc kubenswrapper[4792]: I1202 18:39:18.577500 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"]
Dec 02 18:39:18 crc kubenswrapper[4792]: I1202 18:39:18.579940 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc"
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 02 18:39:18 crc kubenswrapper[4792]: I1202 18:39:18.586752 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 02 18:39:18 crc kubenswrapper[4792]: I1202 18:39:18.706410 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/fd6bd65f-0f46-42b5-b74a-d62fa8544aaa-var-lock\") pod \"installer-9-crc\" (UID: \"fd6bd65f-0f46-42b5-b74a-d62fa8544aaa\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 02 18:39:18 crc kubenswrapper[4792]: I1202 18:39:18.706480 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fd6bd65f-0f46-42b5-b74a-d62fa8544aaa-kube-api-access\") pod \"installer-9-crc\" (UID: \"fd6bd65f-0f46-42b5-b74a-d62fa8544aaa\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 02 18:39:18 crc kubenswrapper[4792]: I1202 18:39:18.706550 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/fd6bd65f-0f46-42b5-b74a-d62fa8544aaa-kubelet-dir\") pod \"installer-9-crc\" (UID: \"fd6bd65f-0f46-42b5-b74a-d62fa8544aaa\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 02 18:39:18 crc kubenswrapper[4792]: I1202 18:39:18.807489 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fd6bd65f-0f46-42b5-b74a-d62fa8544aaa-kube-api-access\") pod \"installer-9-crc\" (UID: \"fd6bd65f-0f46-42b5-b74a-d62fa8544aaa\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 02 18:39:18 crc kubenswrapper[4792]: I1202 18:39:18.807610 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/fd6bd65f-0f46-42b5-b74a-d62fa8544aaa-kubelet-dir\") pod \"installer-9-crc\" (UID: \"fd6bd65f-0f46-42b5-b74a-d62fa8544aaa\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 02 18:39:18 crc kubenswrapper[4792]: I1202 18:39:18.807694 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/fd6bd65f-0f46-42b5-b74a-d62fa8544aaa-var-lock\") pod \"installer-9-crc\" (UID: \"fd6bd65f-0f46-42b5-b74a-d62fa8544aaa\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 02 18:39:18 crc kubenswrapper[4792]: I1202 18:39:18.807776 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/fd6bd65f-0f46-42b5-b74a-d62fa8544aaa-var-lock\") pod \"installer-9-crc\" (UID: \"fd6bd65f-0f46-42b5-b74a-d62fa8544aaa\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 02 18:39:18 crc kubenswrapper[4792]: I1202 18:39:18.807838 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/fd6bd65f-0f46-42b5-b74a-d62fa8544aaa-kubelet-dir\") pod \"installer-9-crc\" (UID: \"fd6bd65f-0f46-42b5-b74a-d62fa8544aaa\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 02 18:39:18 crc kubenswrapper[4792]: I1202 18:39:18.831671 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fd6bd65f-0f46-42b5-b74a-d62fa8544aaa-kube-api-access\") pod \"installer-9-crc\" (UID: 
\"fd6bd65f-0f46-42b5-b74a-d62fa8544aaa\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 02 18:39:18 crc kubenswrapper[4792]: I1202 18:39:18.924367 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 02 18:39:19 crc kubenswrapper[4792]: E1202 18:39:19.960593 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-b92km" podUID="a691800f-1f6a-4e86-b97a-73d8181b39d5" Dec 02 18:39:19 crc kubenswrapper[4792]: E1202 18:39:19.960675 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-z4jbn" podUID="77b66620-e883-40d8-8294-b6b4a2f3ad8c" Dec 02 18:39:19 crc kubenswrapper[4792]: E1202 18:39:19.960741 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-hrlnq" podUID="c118dcaa-7c35-4a91-9b16-b3796e95fa86" Dec 02 18:39:19 crc kubenswrapper[4792]: E1202 18:39:19.960800 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-4kkfv" podUID="a2745526-4b6e-4ccb-82d9-106a0bf83b74" Dec 02 18:39:20 crc kubenswrapper[4792]: E1202 18:39:20.049475 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 02 18:39:20 crc kubenswrapper[4792]: E1202 18:39:20.049681 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vrh79,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-x7tgb_openshift-marketplace(8bad1c4d-9724-410d-8651-0a8c7c1d92b3): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 18:39:20 crc kubenswrapper[4792]: E1202 18:39:20.050885 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-x7tgb" podUID="8bad1c4d-9724-410d-8651-0a8c7c1d92b3" Dec 02 18:39:20 crc kubenswrapper[4792]: E1202 18:39:20.068917 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 02 18:39:20 crc kubenswrapper[4792]: E1202 18:39:20.069106 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xxgtv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-c85tx_openshift-marketplace(715f891b-4e52-454b-b5c0-22694ef088e8): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 18:39:20 crc kubenswrapper[4792]: E1202 18:39:20.070268 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-c85tx" podUID="715f891b-4e52-454b-b5c0-22694ef088e8" Dec 02 18:39:20 crc kubenswrapper[4792]: I1202 18:39:20.194203 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-2ls4m"] Dec 02 18:39:20 crc kubenswrapper[4792]: W1202 18:39:20.204889 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod778806a7_7e6f_4776_8233_b42b296ebc52.slice/crio-eaa7c19112d92957237a0a73b3abd381117b611b7b75df3ed78a272a6fe352ba WatchSource:0}: Error finding container eaa7c19112d92957237a0a73b3abd381117b611b7b75df3ed78a272a6fe352ba: Status 404 returned error can't find the container with id eaa7c19112d92957237a0a73b3abd381117b611b7b75df3ed78a272a6fe352ba Dec 02 18:39:20 crc kubenswrapper[4792]: I1202 18:39:20.451244 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 02 18:39:20 crc kubenswrapper[4792]: W1202 18:39:20.469018 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pode9a3ff99_516a_436e_920e_ef6eca8878e1.slice/crio-85a21d5d8974e779516c2e56b03699c0042dc2238ea9b4014a172e39867c3b99 WatchSource:0}: Error finding container 85a21d5d8974e779516c2e56b03699c0042dc2238ea9b4014a172e39867c3b99: Status 404 returned error can't find the container with id 85a21d5d8974e779516c2e56b03699c0042dc2238ea9b4014a172e39867c3b99 Dec 02 18:39:20 crc kubenswrapper[4792]: I1202 18:39:20.481942 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 02 18:39:20 crc 
kubenswrapper[4792]: W1202 18:39:20.490566 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podfd6bd65f_0f46_42b5_b74a_d62fa8544aaa.slice/crio-47e292430a75129f37710ff713ebf0d2bcdd3679ea6cf2cb6514218516d972eb WatchSource:0}: Error finding container 47e292430a75129f37710ff713ebf0d2bcdd3679ea6cf2cb6514218516d972eb: Status 404 returned error can't find the container with id 47e292430a75129f37710ff713ebf0d2bcdd3679ea6cf2cb6514218516d972eb Dec 02 18:39:20 crc kubenswrapper[4792]: I1202 18:39:20.699145 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-2ls4m" event={"ID":"778806a7-7e6f-4776-8233-b42b296ebc52","Type":"ContainerStarted","Data":"563c342e88278cc8fd12888feacbe7b7ff3f586ee596050e3713f47b995e434f"} Dec 02 18:39:20 crc kubenswrapper[4792]: I1202 18:39:20.699504 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-2ls4m" event={"ID":"778806a7-7e6f-4776-8233-b42b296ebc52","Type":"ContainerStarted","Data":"eaa7c19112d92957237a0a73b3abd381117b611b7b75df3ed78a272a6fe352ba"} Dec 02 18:39:20 crc kubenswrapper[4792]: I1202 18:39:20.702099 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"fd6bd65f-0f46-42b5-b74a-d62fa8544aaa","Type":"ContainerStarted","Data":"47e292430a75129f37710ff713ebf0d2bcdd3679ea6cf2cb6514218516d972eb"} Dec 02 18:39:20 crc kubenswrapper[4792]: I1202 18:39:20.704364 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"e9a3ff99-516a-436e-920e-ef6eca8878e1","Type":"ContainerStarted","Data":"85a21d5d8974e779516c2e56b03699c0042dc2238ea9b4014a172e39867c3b99"} Dec 02 18:39:20 crc kubenswrapper[4792]: E1202 18:39:20.706691 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-c85tx" podUID="715f891b-4e52-454b-b5c0-22694ef088e8" Dec 02 18:39:20 crc kubenswrapper[4792]: E1202 18:39:20.707025 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-x7tgb" podUID="8bad1c4d-9724-410d-8651-0a8c7c1d92b3" Dec 02 18:39:21 crc kubenswrapper[4792]: I1202 18:39:21.713952 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-2ls4m" event={"ID":"778806a7-7e6f-4776-8233-b42b296ebc52","Type":"ContainerStarted","Data":"d97d9f74ea671b4baea03ea41aeffd9c582a57ec22f66c9f1f845b5cac38d8f6"} Dec 02 18:39:21 crc kubenswrapper[4792]: I1202 18:39:21.716482 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"fd6bd65f-0f46-42b5-b74a-d62fa8544aaa","Type":"ContainerStarted","Data":"1b7be409daf3b735598ca02b3ad00599eade4a6df610df0259b26a8a3515cc09"} Dec 02 18:39:21 crc kubenswrapper[4792]: I1202 18:39:21.718302 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"e9a3ff99-516a-436e-920e-ef6eca8878e1","Type":"ContainerStarted","Data":"e2887cf678f7327baafbca12c6920fd6a46ade65e975cc468957c9c5b3427d1c"} Dec 02 18:39:21 crc kubenswrapper[4792]: I1202 
Dec 02 18:39:21 crc kubenswrapper[4792]: I1202 18:39:21.764810 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=3.764791355 podStartE2EDuration="3.764791355s" podCreationTimestamp="2025-12-02 18:39:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:39:21.760786167 +0000 UTC m=+192.533678495" watchObservedRunningTime="2025-12-02 18:39:21.764791355 +0000 UTC m=+192.537683683"
Dec 02 18:39:21 crc kubenswrapper[4792]: I1202 18:39:21.794628 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=8.794607087 podStartE2EDuration="8.794607087s" podCreationTimestamp="2025-12-02 18:39:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:39:21.787547707 +0000 UTC m=+192.560440035" watchObservedRunningTime="2025-12-02 18:39:21.794607087 +0000 UTC m=+192.567499415"
Dec 02 18:39:22 crc kubenswrapper[4792]: I1202 18:39:22.740539 4792 generic.go:334] "Generic (PLEG): container finished" podID="e9a3ff99-516a-436e-920e-ef6eca8878e1" containerID="e2887cf678f7327baafbca12c6920fd6a46ade65e975cc468957c9c5b3427d1c" exitCode=0
Dec 02 18:39:22 crc kubenswrapper[4792]: I1202 18:39:22.742387 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"e9a3ff99-516a-436e-920e-ef6eca8878e1","Type":"ContainerDied","Data":"e2887cf678f7327baafbca12c6920fd6a46ade65e975cc468957c9c5b3427d1c"}
Dec 02 18:39:24 crc kubenswrapper[4792]: I1202 18:39:24.008335 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 02 18:39:24 crc kubenswrapper[4792]: I1202 18:39:24.187956 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e9a3ff99-516a-436e-920e-ef6eca8878e1-kubelet-dir\") pod \"e9a3ff99-516a-436e-920e-ef6eca8878e1\" (UID: \"e9a3ff99-516a-436e-920e-ef6eca8878e1\") "
Dec 02 18:39:24 crc kubenswrapper[4792]: I1202 18:39:24.188250 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e9a3ff99-516a-436e-920e-ef6eca8878e1-kube-api-access\") pod \"e9a3ff99-516a-436e-920e-ef6eca8878e1\" (UID: \"e9a3ff99-516a-436e-920e-ef6eca8878e1\") "
Dec 02 18:39:24 crc kubenswrapper[4792]: I1202 18:39:24.188200 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e9a3ff99-516a-436e-920e-ef6eca8878e1-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "e9a3ff99-516a-436e-920e-ef6eca8878e1" (UID: "e9a3ff99-516a-436e-920e-ef6eca8878e1"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 02 18:39:24 crc kubenswrapper[4792]: I1202 18:39:24.196357 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9a3ff99-516a-436e-920e-ef6eca8878e1-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e9a3ff99-516a-436e-920e-ef6eca8878e1" (UID: "e9a3ff99-516a-436e-920e-ef6eca8878e1"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 18:39:24 crc kubenswrapper[4792]: I1202 18:39:24.290684 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e9a3ff99-516a-436e-920e-ef6eca8878e1-kube-api-access\") on node \"crc\" DevicePath \"\""
Dec 02 18:39:24 crc kubenswrapper[4792]: I1202 18:39:24.290730 4792 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e9a3ff99-516a-436e-920e-ef6eca8878e1-kubelet-dir\") on node \"crc\" DevicePath \"\""
Dec 02 18:39:24 crc kubenswrapper[4792]: I1202 18:39:24.762493 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"e9a3ff99-516a-436e-920e-ef6eca8878e1","Type":"ContainerDied","Data":"85a21d5d8974e779516c2e56b03699c0042dc2238ea9b4014a172e39867c3b99"}
Dec 02 18:39:24 crc kubenswrapper[4792]: I1202 18:39:24.762651 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="85a21d5d8974e779516c2e56b03699c0042dc2238ea9b4014a172e39867c3b99"
Dec 02 18:39:24 crc kubenswrapper[4792]: I1202 18:39:24.762773 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 02 18:39:29 crc kubenswrapper[4792]: I1202 18:39:29.795224 4792 generic.go:334] "Generic (PLEG): container finished" podID="bba4322e-397d-4b6a-b52c-14dfeecbf071" containerID="de3a9b7814ee7602d3da77cbacf2596599d30f687919faa0c50b68f27cd72daa" exitCode=0
Dec 02 18:39:29 crc kubenswrapper[4792]: I1202 18:39:29.795283 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p6vhp" event={"ID":"bba4322e-397d-4b6a-b52c-14dfeecbf071","Type":"ContainerDied","Data":"de3a9b7814ee7602d3da77cbacf2596599d30f687919faa0c50b68f27cd72daa"}
Dec 02 18:39:30 crc kubenswrapper[4792]: I1202 18:39:30.804042 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p6vhp" event={"ID":"bba4322e-397d-4b6a-b52c-14dfeecbf071","Type":"ContainerStarted","Data":"630af089f94974c9797a52909316c899823f65a1a43d044168624e35d7723606"}
Dec 02 18:39:30 crc kubenswrapper[4792]: I1202 18:39:30.829386 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-p6vhp" podStartSLOduration=3.8519109030000003 podStartE2EDuration="1m1.829357786s" podCreationTimestamp="2025-12-02 18:38:29 +0000 UTC" firstStartedPulling="2025-12-02 18:38:32.199585809 +0000 UTC m=+142.972478137" lastFinishedPulling="2025-12-02 18:39:30.177032692 +0000 UTC m=+200.949925020" observedRunningTime="2025-12-02 18:39:30.828477081 +0000 UTC m=+201.601369429" watchObservedRunningTime="2025-12-02 18:39:30.829357786 +0000 UTC m=+201.602250124"
Dec 02 18:39:33 crc kubenswrapper[4792]: I1202 18:39:33.832946 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b92km" event={"ID":"a691800f-1f6a-4e86-b97a-73d8181b39d5","Type":"ContainerStarted","Data":"687c3468c4457a83e62f1cc306c8ca3cd35f7cf68317c8e45c68c6d01dc0d571"}
Dec 02 18:39:33 crc kubenswrapper[4792]: I1202 18:39:33.837765 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qrcm2" event={"ID":"0ecd7ddf-ad6e-4f71-8d80-5626f4830547","Type":"ContainerStarted","Data":"da94c2647f7c9d4e65451daf83c2dad0b87f94a91a3d9a429d0f3a99758f73ee"}
Dec 02 18:39:34 crc kubenswrapper[4792]: I1202 18:39:34.845413 4792 generic.go:334] "Generic (PLEG): container finished" podID="a691800f-1f6a-4e86-b97a-73d8181b39d5" containerID="687c3468c4457a83e62f1cc306c8ca3cd35f7cf68317c8e45c68c6d01dc0d571" exitCode=0
Dec 02 18:39:34 crc kubenswrapper[4792]: I1202 18:39:34.845487 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b92km" event={"ID":"a691800f-1f6a-4e86-b97a-73d8181b39d5","Type":"ContainerDied","Data":"687c3468c4457a83e62f1cc306c8ca3cd35f7cf68317c8e45c68c6d01dc0d571"}
Dec 02 18:39:34 crc kubenswrapper[4792]: I1202 18:39:34.848034 4792 generic.go:334] "Generic (PLEG): container finished" podID="0ecd7ddf-ad6e-4f71-8d80-5626f4830547" containerID="da94c2647f7c9d4e65451daf83c2dad0b87f94a91a3d9a429d0f3a99758f73ee" exitCode=0
Dec 02 18:39:34 crc kubenswrapper[4792]: I1202 18:39:34.848085 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qrcm2" event={"ID":"0ecd7ddf-ad6e-4f71-8d80-5626f4830547","Type":"ContainerDied","Data":"da94c2647f7c9d4e65451daf83c2dad0b87f94a91a3d9a429d0f3a99758f73ee"}
Dec 02 18:39:35 crc kubenswrapper[4792]: I1202 18:39:35.240841 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-hgblz"]
Dec 02 18:39:35 crc kubenswrapper[4792]: I1202 18:39:35.858268 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x7tgb" event={"ID":"8bad1c4d-9724-410d-8651-0a8c7c1d92b3","Type":"ContainerStarted","Data":"b5b6776e7e3ac343d3080854851e2b98ae793a898fbcbd24810d5b36b7d69434"}
Dec 02 18:39:35 crc kubenswrapper[4792]: I1202 18:39:35.860654 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c85tx" event={"ID":"715f891b-4e52-454b-b5c0-22694ef088e8","Type":"ContainerStarted","Data":"5466f2cd997f197a84e922fad7e87e31254cd10ed751caec26f53eb8e05496a7"}
Dec 02 18:39:35 crc kubenswrapper[4792]: I1202 18:39:35.862227 4792 generic.go:334] "Generic (PLEG): container finished" podID="c118dcaa-7c35-4a91-9b16-b3796e95fa86" containerID="a6503cd0f8efe51e13a195bdcbe46dbce7ea207228414e797e3fd9c653b8b724" exitCode=0
Dec 02 18:39:35 crc kubenswrapper[4792]: I1202 18:39:35.862309 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hrlnq" event={"ID":"c118dcaa-7c35-4a91-9b16-b3796e95fa86","Type":"ContainerDied","Data":"a6503cd0f8efe51e13a195bdcbe46dbce7ea207228414e797e3fd9c653b8b724"}
Dec 02 18:39:35 crc kubenswrapper[4792]: I1202 18:39:35.865274 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b92km" event={"ID":"a691800f-1f6a-4e86-b97a-73d8181b39d5","Type":"ContainerStarted","Data":"b7340bd236b58025272647189d4a0aa84593a9fd1aa4038ee450eb1172019e2f"}
Dec 02 18:39:35 crc kubenswrapper[4792]: I1202 18:39:35.867935 4792 generic.go:334] "Generic (PLEG): container finished" podID="77b66620-e883-40d8-8294-b6b4a2f3ad8c" containerID="d9185788232b909d86a3d99049f921a1ff08625c9eaac830acda146ada5a3338" exitCode=0
Dec 02 18:39:35 crc kubenswrapper[4792]: I1202 18:39:35.867961 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z4jbn" event={"ID":"77b66620-e883-40d8-8294-b6b4a2f3ad8c","Type":"ContainerDied","Data":"d9185788232b909d86a3d99049f921a1ff08625c9eaac830acda146ada5a3338"}
Dec 02 18:39:35 crc kubenswrapper[4792]: I1202 18:39:35.983188 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-b92km" podStartSLOduration=3.371483028 podStartE2EDuration="1m4.983167678s" podCreationTimestamp="2025-12-02 18:38:31 +0000 UTC" firstStartedPulling="2025-12-02 18:38:33.881336821 +0000 UTC m=+144.654229149" lastFinishedPulling="2025-12-02 18:39:35.493021471 +0000 UTC m=+206.265913799" observedRunningTime="2025-12-02 18:39:35.961167688 +0000 UTC m=+206.734060026" watchObservedRunningTime="2025-12-02 18:39:35.983167678 +0000 UTC m=+206.756060006"
Dec 02 18:39:36 crc kubenswrapper[4792]: I1202 18:39:36.876429 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qrcm2" event={"ID":"0ecd7ddf-ad6e-4f71-8d80-5626f4830547","Type":"ContainerStarted","Data":"19cb41c332986cd9fdfa35e7cb45e88f2e83396d8cfbcb934e2a78328824c0dc"}
Dec 02 18:39:36 crc kubenswrapper[4792]: I1202 18:39:36.880280 4792 generic.go:334] "Generic (PLEG): container finished" podID="8bad1c4d-9724-410d-8651-0a8c7c1d92b3" containerID="b5b6776e7e3ac343d3080854851e2b98ae793a898fbcbd24810d5b36b7d69434" exitCode=0
Dec 02 18:39:36 crc kubenswrapper[4792]: I1202 18:39:36.880356 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x7tgb" event={"ID":"8bad1c4d-9724-410d-8651-0a8c7c1d92b3","Type":"ContainerDied","Data":"b5b6776e7e3ac343d3080854851e2b98ae793a898fbcbd24810d5b36b7d69434"}
Dec 02 18:39:36 crc kubenswrapper[4792]: I1202 18:39:36.882332 4792 generic.go:334] "Generic (PLEG): container finished" podID="715f891b-4e52-454b-b5c0-22694ef088e8" containerID="5466f2cd997f197a84e922fad7e87e31254cd10ed751caec26f53eb8e05496a7" exitCode=0
Dec 02 18:39:36 crc kubenswrapper[4792]: I1202 18:39:36.882380 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c85tx" event={"ID":"715f891b-4e52-454b-b5c0-22694ef088e8","Type":"ContainerDied","Data":"5466f2cd997f197a84e922fad7e87e31254cd10ed751caec26f53eb8e05496a7"}
Dec 02 18:39:36 crc kubenswrapper[4792]: I1202 18:39:36.885767 4792 generic.go:334] "Generic (PLEG): container finished" podID="a2745526-4b6e-4ccb-82d9-106a0bf83b74" containerID="fb969ddfbf9165446e8f6ebc850e357edeb9ab796a93c225eb9a7db73b378978" exitCode=0
Dec 02 18:39:36 crc kubenswrapper[4792]: I1202 18:39:36.885812 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4kkfv" event={"ID":"a2745526-4b6e-4ccb-82d9-106a0bf83b74","Type":"ContainerDied","Data":"fb969ddfbf9165446e8f6ebc850e357edeb9ab796a93c225eb9a7db73b378978"}
Dec 02 18:39:36 crc kubenswrapper[4792]: I1202 18:39:36.896322 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-qrcm2" podStartSLOduration=4.926218949 podStartE2EDuration="1m7.896303732s" podCreationTimestamp="2025-12-02 18:38:29 +0000 UTC" firstStartedPulling="2025-12-02 18:38:32.709750576 +0000 UTC m=+143.482642904" lastFinishedPulling="2025-12-02 18:39:35.679835359 +0000 UTC m=+206.452727687" observedRunningTime="2025-12-02 18:39:36.892905348 +0000 UTC m=+207.665797676" watchObservedRunningTime="2025-12-02 18:39:36.896303732 +0000 UTC m=+207.669196050"
Dec 02 18:39:38 crc kubenswrapper[4792]: I1202 18:39:38.081164 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 02 18:39:38 crc kubenswrapper[4792]: I1202 18:39:38.081252 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 02 18:39:38 crc kubenswrapper[4792]: I1202 18:39:38.081314 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4"
Dec 02 18:39:38 crc kubenswrapper[4792]: I1202 18:39:38.082065 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c"} pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 02 18:39:38 crc kubenswrapper[4792]: I1202 18:39:38.082177 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" containerID="cri-o://a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c" gracePeriod=600
Dec 02 18:39:38 crc kubenswrapper[4792]: I1202 18:39:38.900587 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hrlnq" event={"ID":"c118dcaa-7c35-4a91-9b16-b3796e95fa86","Type":"ContainerStarted","Data":"505ec2e14d0f3560ae3a57d9aa8f932f8fa18afcabcbe48bdb2d3ecdc72bee2e"}
Dec 02 18:39:38 crc kubenswrapper[4792]: I1202 18:39:38.903466 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z4jbn" event={"ID":"77b66620-e883-40d8-8294-b6b4a2f3ad8c","Type":"ContainerStarted","Data":"8bebd34a8e4011cb5b5ac971551e3e2aa6a28e093921fc7118dbbd252530852f"}
Dec 02 18:39:39 crc kubenswrapper[4792]: I1202 18:39:39.394577 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-p6vhp"
Dec 02 18:39:39 crc kubenswrapper[4792]: I1202 18:39:39.394633 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-p6vhp"
Dec 02 18:39:39 crc kubenswrapper[4792]: I1202 18:39:39.502437 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-p6vhp"
Dec 02 18:39:39 crc kubenswrapper[4792]: I1202 18:39:39.579556 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-qrcm2"
Dec 02 18:39:39 crc kubenswrapper[4792]: I1202 18:39:39.579599 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-qrcm2"
pod="openshift-marketplace/community-operators-qrcm2" Dec 02 18:39:39 crc kubenswrapper[4792]: I1202 18:39:39.629064 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-qrcm2" Dec 02 18:39:39 crc kubenswrapper[4792]: I1202 18:39:39.918324 4792 generic.go:334] "Generic (PLEG): container finished" podID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerID="a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c" exitCode=0 Dec 02 18:39:39 crc kubenswrapper[4792]: I1202 18:39:39.918983 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" event={"ID":"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f","Type":"ContainerDied","Data":"a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c"} Dec 02 18:39:39 crc kubenswrapper[4792]: I1202 18:39:39.941074 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-z4jbn" podStartSLOduration=6.938752881 podStartE2EDuration="1m10.941045938s" podCreationTimestamp="2025-12-02 18:38:29 +0000 UTC" firstStartedPulling="2025-12-02 18:38:32.327887001 +0000 UTC m=+143.100779329" lastFinishedPulling="2025-12-02 18:39:36.330180058 +0000 UTC m=+207.103072386" observedRunningTime="2025-12-02 18:39:39.93967598 +0000 UTC m=+210.712568308" watchObservedRunningTime="2025-12-02 18:39:39.941045938 +0000 UTC m=+210.713938276" Dec 02 18:39:39 crc kubenswrapper[4792]: I1202 18:39:39.965713 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-p6vhp" Dec 02 18:39:39 crc kubenswrapper[4792]: I1202 18:39:39.969279 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-hrlnq" podStartSLOduration=7.313178807 podStartE2EDuration="1m9.96925377s" podCreationTimestamp="2025-12-02 18:38:30 +0000 UTC" firstStartedPulling="2025-12-02 18:38:33.859159705 +0000 UTC m=+144.632052033" lastFinishedPulling="2025-12-02 18:39:36.515234668 +0000 UTC m=+207.288126996" observedRunningTime="2025-12-02 18:39:39.965914078 +0000 UTC m=+210.738806416" watchObservedRunningTime="2025-12-02 18:39:39.96925377 +0000 UTC m=+210.742146098" Dec 02 18:39:41 crc kubenswrapper[4792]: I1202 18:39:41.537227 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-hrlnq" Dec 02 18:39:41 crc kubenswrapper[4792]: I1202 18:39:41.537729 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-hrlnq" Dec 02 18:39:41 crc kubenswrapper[4792]: I1202 18:39:41.600549 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-hrlnq" Dec 02 18:39:41 crc kubenswrapper[4792]: I1202 18:39:41.835275 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-b92km" Dec 02 18:39:41 crc kubenswrapper[4792]: I1202 18:39:41.835349 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-b92km" Dec 02 18:39:42 crc kubenswrapper[4792]: I1202 18:39:42.080026 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-b92km" Dec 02 18:39:42 crc kubenswrapper[4792]: I1202 18:39:42.143724 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openshift-marketplace/redhat-marketplace-b92km" Dec 02 18:39:42 crc kubenswrapper[4792]: I1202 18:39:42.941677 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" event={"ID":"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f","Type":"ContainerStarted","Data":"ac55c565ccef01baeaf0c74e3ed8322811e310a89ab86a4e7ab2e0c46a0fe098"} Dec 02 18:39:44 crc kubenswrapper[4792]: I1202 18:39:44.233288 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-b92km"] Dec 02 18:39:44 crc kubenswrapper[4792]: I1202 18:39:44.234260 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-b92km" podUID="a691800f-1f6a-4e86-b97a-73d8181b39d5" containerName="registry-server" containerID="cri-o://b7340bd236b58025272647189d4a0aa84593a9fd1aa4038ee450eb1172019e2f" gracePeriod=2 Dec 02 18:39:44 crc kubenswrapper[4792]: I1202 18:39:44.635166 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b92km" Dec 02 18:39:44 crc kubenswrapper[4792]: I1202 18:39:44.797262 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fbmnl\" (UniqueName: \"kubernetes.io/projected/a691800f-1f6a-4e86-b97a-73d8181b39d5-kube-api-access-fbmnl\") pod \"a691800f-1f6a-4e86-b97a-73d8181b39d5\" (UID: \"a691800f-1f6a-4e86-b97a-73d8181b39d5\") " Dec 02 18:39:44 crc kubenswrapper[4792]: I1202 18:39:44.797383 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a691800f-1f6a-4e86-b97a-73d8181b39d5-catalog-content\") pod \"a691800f-1f6a-4e86-b97a-73d8181b39d5\" (UID: \"a691800f-1f6a-4e86-b97a-73d8181b39d5\") " Dec 02 18:39:44 crc kubenswrapper[4792]: I1202 18:39:44.797446 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a691800f-1f6a-4e86-b97a-73d8181b39d5-utilities\") pod \"a691800f-1f6a-4e86-b97a-73d8181b39d5\" (UID: \"a691800f-1f6a-4e86-b97a-73d8181b39d5\") " Dec 02 18:39:44 crc kubenswrapper[4792]: I1202 18:39:44.798702 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a691800f-1f6a-4e86-b97a-73d8181b39d5-utilities" (OuterVolumeSpecName: "utilities") pod "a691800f-1f6a-4e86-b97a-73d8181b39d5" (UID: "a691800f-1f6a-4e86-b97a-73d8181b39d5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:39:44 crc kubenswrapper[4792]: I1202 18:39:44.805927 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a691800f-1f6a-4e86-b97a-73d8181b39d5-kube-api-access-fbmnl" (OuterVolumeSpecName: "kube-api-access-fbmnl") pod "a691800f-1f6a-4e86-b97a-73d8181b39d5" (UID: "a691800f-1f6a-4e86-b97a-73d8181b39d5"). InnerVolumeSpecName "kube-api-access-fbmnl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:39:44 crc kubenswrapper[4792]: I1202 18:39:44.817907 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a691800f-1f6a-4e86-b97a-73d8181b39d5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a691800f-1f6a-4e86-b97a-73d8181b39d5" (UID: "a691800f-1f6a-4e86-b97a-73d8181b39d5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:39:44 crc kubenswrapper[4792]: I1202 18:39:44.899564 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a691800f-1f6a-4e86-b97a-73d8181b39d5-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 18:39:44 crc kubenswrapper[4792]: I1202 18:39:44.899608 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fbmnl\" (UniqueName: \"kubernetes.io/projected/a691800f-1f6a-4e86-b97a-73d8181b39d5-kube-api-access-fbmnl\") on node \"crc\" DevicePath \"\"" Dec 02 18:39:44 crc kubenswrapper[4792]: I1202 18:39:44.899639 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a691800f-1f6a-4e86-b97a-73d8181b39d5-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 18:39:44 crc kubenswrapper[4792]: I1202 18:39:44.962010 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x7tgb" event={"ID":"8bad1c4d-9724-410d-8651-0a8c7c1d92b3","Type":"ContainerStarted","Data":"e7c045ea11be22a796ab65a9552622fae5b599cd82c078898e4286a4bcc4ff8c"} Dec 02 18:39:44 crc kubenswrapper[4792]: I1202 18:39:44.965961 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c85tx" event={"ID":"715f891b-4e52-454b-b5c0-22694ef088e8","Type":"ContainerStarted","Data":"367ef52fc282d50cafbe486c37d9a0fce61c808e0cf6f2a19a5e5177f6f220f4"} Dec 02 18:39:44 crc kubenswrapper[4792]: I1202 18:39:44.968730 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4kkfv" event={"ID":"a2745526-4b6e-4ccb-82d9-106a0bf83b74","Type":"ContainerStarted","Data":"86a4cdbb90dd42f8bf8312bca29226ab87ede2d5d2a11ca3ef8910e783d356f7"} Dec 02 18:39:44 crc kubenswrapper[4792]: I1202 18:39:44.973815 4792 generic.go:334] "Generic (PLEG): container finished" podID="a691800f-1f6a-4e86-b97a-73d8181b39d5" containerID="b7340bd236b58025272647189d4a0aa84593a9fd1aa4038ee450eb1172019e2f" exitCode=0 Dec 02 18:39:44 crc kubenswrapper[4792]: I1202 18:39:44.973876 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b92km" event={"ID":"a691800f-1f6a-4e86-b97a-73d8181b39d5","Type":"ContainerDied","Data":"b7340bd236b58025272647189d4a0aa84593a9fd1aa4038ee450eb1172019e2f"} Dec 02 18:39:44 crc kubenswrapper[4792]: I1202 18:39:44.973915 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b92km" event={"ID":"a691800f-1f6a-4e86-b97a-73d8181b39d5","Type":"ContainerDied","Data":"6775bc37b8dccdd1e74048f6e56472b424be49ea7c164876bfe33176727ad2cf"} Dec 02 18:39:44 crc kubenswrapper[4792]: I1202 18:39:44.973940 4792 scope.go:117] "RemoveContainer" containerID="b7340bd236b58025272647189d4a0aa84593a9fd1aa4038ee450eb1172019e2f" Dec 02 18:39:44 crc kubenswrapper[4792]: I1202 18:39:44.974116 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b92km" Dec 02 18:39:44 crc kubenswrapper[4792]: I1202 18:39:44.985847 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-x7tgb" podStartSLOduration=4.209008245 podStartE2EDuration="1m12.98583664s" podCreationTimestamp="2025-12-02 18:38:32 +0000 UTC" firstStartedPulling="2025-12-02 18:38:35.101878703 +0000 UTC m=+145.874771031" lastFinishedPulling="2025-12-02 18:39:43.878707098 +0000 UTC m=+214.651599426" observedRunningTime="2025-12-02 18:39:44.984323928 +0000 UTC m=+215.757216276" watchObservedRunningTime="2025-12-02 18:39:44.98583664 +0000 UTC m=+215.758728968" Dec 02 18:39:45 crc kubenswrapper[4792]: I1202 18:39:45.002636 4792 scope.go:117] "RemoveContainer" containerID="687c3468c4457a83e62f1cc306c8ca3cd35f7cf68317c8e45c68c6d01dc0d571" Dec 02 18:39:45 crc kubenswrapper[4792]: I1202 18:39:45.031686 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-c85tx" podStartSLOduration=5.216201604 podStartE2EDuration="1m14.0316636s" podCreationTimestamp="2025-12-02 18:38:31 +0000 UTC" firstStartedPulling="2025-12-02 18:38:34.96650751 +0000 UTC m=+145.739399838" lastFinishedPulling="2025-12-02 18:39:43.781969466 +0000 UTC m=+214.554861834" observedRunningTime="2025-12-02 18:39:45.028267656 +0000 UTC m=+215.801160014" watchObservedRunningTime="2025-12-02 18:39:45.0316636 +0000 UTC m=+215.804555948" Dec 02 18:39:45 crc kubenswrapper[4792]: I1202 18:39:45.034909 4792 scope.go:117] "RemoveContainer" containerID="094c535f25a70531258532f0c0e6b5e14e046e51f2bda27da98e7fe4bef316ab" Dec 02 18:39:45 crc kubenswrapper[4792]: I1202 18:39:45.055033 4792 scope.go:117] "RemoveContainer" containerID="b7340bd236b58025272647189d4a0aa84593a9fd1aa4038ee450eb1172019e2f" Dec 02 18:39:45 crc kubenswrapper[4792]: E1202 18:39:45.056840 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b7340bd236b58025272647189d4a0aa84593a9fd1aa4038ee450eb1172019e2f\": container with ID starting with b7340bd236b58025272647189d4a0aa84593a9fd1aa4038ee450eb1172019e2f not found: ID does not exist" containerID="b7340bd236b58025272647189d4a0aa84593a9fd1aa4038ee450eb1172019e2f" Dec 02 18:39:45 crc kubenswrapper[4792]: I1202 18:39:45.056978 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b7340bd236b58025272647189d4a0aa84593a9fd1aa4038ee450eb1172019e2f"} err="failed to get container status \"b7340bd236b58025272647189d4a0aa84593a9fd1aa4038ee450eb1172019e2f\": rpc error: code = NotFound desc = could not find container \"b7340bd236b58025272647189d4a0aa84593a9fd1aa4038ee450eb1172019e2f\": container with ID starting with b7340bd236b58025272647189d4a0aa84593a9fd1aa4038ee450eb1172019e2f not found: ID does not exist" Dec 02 18:39:45 crc kubenswrapper[4792]: I1202 18:39:45.057218 4792 scope.go:117] "RemoveContainer" containerID="687c3468c4457a83e62f1cc306c8ca3cd35f7cf68317c8e45c68c6d01dc0d571" Dec 02 18:39:45 crc kubenswrapper[4792]: E1202 18:39:45.058029 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"687c3468c4457a83e62f1cc306c8ca3cd35f7cf68317c8e45c68c6d01dc0d571\": container with ID starting with 687c3468c4457a83e62f1cc306c8ca3cd35f7cf68317c8e45c68c6d01dc0d571 not found: ID does not exist" 
containerID="687c3468c4457a83e62f1cc306c8ca3cd35f7cf68317c8e45c68c6d01dc0d571" Dec 02 18:39:45 crc kubenswrapper[4792]: I1202 18:39:45.058051 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"687c3468c4457a83e62f1cc306c8ca3cd35f7cf68317c8e45c68c6d01dc0d571"} err="failed to get container status \"687c3468c4457a83e62f1cc306c8ca3cd35f7cf68317c8e45c68c6d01dc0d571\": rpc error: code = NotFound desc = could not find container \"687c3468c4457a83e62f1cc306c8ca3cd35f7cf68317c8e45c68c6d01dc0d571\": container with ID starting with 687c3468c4457a83e62f1cc306c8ca3cd35f7cf68317c8e45c68c6d01dc0d571 not found: ID does not exist" Dec 02 18:39:45 crc kubenswrapper[4792]: I1202 18:39:45.058074 4792 scope.go:117] "RemoveContainer" containerID="094c535f25a70531258532f0c0e6b5e14e046e51f2bda27da98e7fe4bef316ab" Dec 02 18:39:45 crc kubenswrapper[4792]: E1202 18:39:45.058272 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"094c535f25a70531258532f0c0e6b5e14e046e51f2bda27da98e7fe4bef316ab\": container with ID starting with 094c535f25a70531258532f0c0e6b5e14e046e51f2bda27da98e7fe4bef316ab not found: ID does not exist" containerID="094c535f25a70531258532f0c0e6b5e14e046e51f2bda27da98e7fe4bef316ab" Dec 02 18:39:45 crc kubenswrapper[4792]: I1202 18:39:45.058299 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"094c535f25a70531258532f0c0e6b5e14e046e51f2bda27da98e7fe4bef316ab"} err="failed to get container status \"094c535f25a70531258532f0c0e6b5e14e046e51f2bda27da98e7fe4bef316ab\": rpc error: code = NotFound desc = could not find container \"094c535f25a70531258532f0c0e6b5e14e046e51f2bda27da98e7fe4bef316ab\": container with ID starting with 094c535f25a70531258532f0c0e6b5e14e046e51f2bda27da98e7fe4bef316ab not found: ID does not exist" Dec 02 18:39:45 crc kubenswrapper[4792]: I1202 18:39:45.058430 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-4kkfv" podStartSLOduration=6.116489059 podStartE2EDuration="1m16.058400461s" podCreationTimestamp="2025-12-02 18:38:29 +0000 UTC" firstStartedPulling="2025-12-02 18:38:33.847797239 +0000 UTC m=+144.620689567" lastFinishedPulling="2025-12-02 18:39:43.789708611 +0000 UTC m=+214.562600969" observedRunningTime="2025-12-02 18:39:45.050803801 +0000 UTC m=+215.823696209" watchObservedRunningTime="2025-12-02 18:39:45.058400461 +0000 UTC m=+215.831292799" Dec 02 18:39:45 crc kubenswrapper[4792]: I1202 18:39:45.066869 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-b92km"] Dec 02 18:39:45 crc kubenswrapper[4792]: I1202 18:39:45.069581 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-b92km"] Dec 02 18:39:45 crc kubenswrapper[4792]: I1202 18:39:45.552674 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a691800f-1f6a-4e86-b97a-73d8181b39d5" path="/var/lib/kubelet/pods/a691800f-1f6a-4e86-b97a-73d8181b39d5/volumes" Dec 02 18:39:49 crc kubenswrapper[4792]: I1202 18:39:49.391253 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-z4jbn" Dec 02 18:39:49 crc kubenswrapper[4792]: I1202 18:39:49.391567 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-z4jbn" Dec 02 18:39:49 crc 
kubenswrapper[4792]: I1202 18:39:49.443047 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-z4jbn" Dec 02 18:39:49 crc kubenswrapper[4792]: I1202 18:39:49.619183 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-qrcm2" Dec 02 18:39:49 crc kubenswrapper[4792]: I1202 18:39:49.988300 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-4kkfv" Dec 02 18:39:49 crc kubenswrapper[4792]: I1202 18:39:49.988737 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-4kkfv" Dec 02 18:39:50 crc kubenswrapper[4792]: I1202 18:39:50.029274 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-4kkfv" Dec 02 18:39:50 crc kubenswrapper[4792]: I1202 18:39:50.078964 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-z4jbn" Dec 02 18:39:50 crc kubenswrapper[4792]: I1202 18:39:50.099941 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-4kkfv" Dec 02 18:39:50 crc kubenswrapper[4792]: I1202 18:39:50.632021 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4kkfv"] Dec 02 18:39:51 crc kubenswrapper[4792]: I1202 18:39:51.584392 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-hrlnq" Dec 02 18:39:52 crc kubenswrapper[4792]: I1202 18:39:52.035721 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-4kkfv" podUID="a2745526-4b6e-4ccb-82d9-106a0bf83b74" containerName="registry-server" containerID="cri-o://86a4cdbb90dd42f8bf8312bca29226ab87ede2d5d2a11ca3ef8910e783d356f7" gracePeriod=2 Dec 02 18:39:52 crc kubenswrapper[4792]: I1202 18:39:52.440537 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-c85tx" Dec 02 18:39:52 crc kubenswrapper[4792]: I1202 18:39:52.440633 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-c85tx" Dec 02 18:39:52 crc kubenswrapper[4792]: I1202 18:39:52.509422 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-c85tx" Dec 02 18:39:52 crc kubenswrapper[4792]: I1202 18:39:52.565693 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-x7tgb" Dec 02 18:39:52 crc kubenswrapper[4792]: I1202 18:39:52.565918 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-x7tgb" Dec 02 18:39:52 crc kubenswrapper[4792]: I1202 18:39:52.634676 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-x7tgb" Dec 02 18:39:53 crc kubenswrapper[4792]: I1202 18:39:53.033184 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qrcm2"] Dec 02 18:39:53 crc kubenswrapper[4792]: I1202 18:39:53.033868 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-qrcm2" 
podUID="0ecd7ddf-ad6e-4f71-8d80-5626f4830547" containerName="registry-server" containerID="cri-o://19cb41c332986cd9fdfa35e7cb45e88f2e83396d8cfbcb934e2a78328824c0dc" gracePeriod=2 Dec 02 18:39:53 crc kubenswrapper[4792]: I1202 18:39:53.081041 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-c85tx" Dec 02 18:39:53 crc kubenswrapper[4792]: I1202 18:39:53.086248 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-x7tgb" Dec 02 18:39:56 crc kubenswrapper[4792]: I1202 18:39:56.063895 4792 generic.go:334] "Generic (PLEG): container finished" podID="a2745526-4b6e-4ccb-82d9-106a0bf83b74" containerID="86a4cdbb90dd42f8bf8312bca29226ab87ede2d5d2a11ca3ef8910e783d356f7" exitCode=0 Dec 02 18:39:56 crc kubenswrapper[4792]: I1202 18:39:56.063980 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4kkfv" event={"ID":"a2745526-4b6e-4ccb-82d9-106a0bf83b74","Type":"ContainerDied","Data":"86a4cdbb90dd42f8bf8312bca29226ab87ede2d5d2a11ca3ef8910e783d356f7"} Dec 02 18:39:57 crc kubenswrapper[4792]: I1202 18:39:57.076120 4792 generic.go:334] "Generic (PLEG): container finished" podID="0ecd7ddf-ad6e-4f71-8d80-5626f4830547" containerID="19cb41c332986cd9fdfa35e7cb45e88f2e83396d8cfbcb934e2a78328824c0dc" exitCode=0 Dec 02 18:39:57 crc kubenswrapper[4792]: I1202 18:39:57.076184 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qrcm2" event={"ID":"0ecd7ddf-ad6e-4f71-8d80-5626f4830547","Type":"ContainerDied","Data":"19cb41c332986cd9fdfa35e7cb45e88f2e83396d8cfbcb934e2a78328824c0dc"} Dec 02 18:39:57 crc kubenswrapper[4792]: I1202 18:39:57.227948 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-x7tgb"] Dec 02 18:39:57 crc kubenswrapper[4792]: I1202 18:39:57.228517 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-x7tgb" podUID="8bad1c4d-9724-410d-8651-0a8c7c1d92b3" containerName="registry-server" containerID="cri-o://e7c045ea11be22a796ab65a9552622fae5b599cd82c078898e4286a4bcc4ff8c" gracePeriod=2 Dec 02 18:39:57 crc kubenswrapper[4792]: I1202 18:39:57.853353 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4kkfv" Dec 02 18:39:57 crc kubenswrapper[4792]: I1202 18:39:57.995425 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2745526-4b6e-4ccb-82d9-106a0bf83b74-catalog-content\") pod \"a2745526-4b6e-4ccb-82d9-106a0bf83b74\" (UID: \"a2745526-4b6e-4ccb-82d9-106a0bf83b74\") " Dec 02 18:39:57 crc kubenswrapper[4792]: I1202 18:39:57.996159 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pqppw\" (UniqueName: \"kubernetes.io/projected/a2745526-4b6e-4ccb-82d9-106a0bf83b74-kube-api-access-pqppw\") pod \"a2745526-4b6e-4ccb-82d9-106a0bf83b74\" (UID: \"a2745526-4b6e-4ccb-82d9-106a0bf83b74\") " Dec 02 18:39:57 crc kubenswrapper[4792]: I1202 18:39:57.996264 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2745526-4b6e-4ccb-82d9-106a0bf83b74-utilities\") pod \"a2745526-4b6e-4ccb-82d9-106a0bf83b74\" (UID: \"a2745526-4b6e-4ccb-82d9-106a0bf83b74\") " Dec 02 18:39:57 crc kubenswrapper[4792]: I1202 18:39:57.996966 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a2745526-4b6e-4ccb-82d9-106a0bf83b74-utilities" (OuterVolumeSpecName: "utilities") pod "a2745526-4b6e-4ccb-82d9-106a0bf83b74" (UID: "a2745526-4b6e-4ccb-82d9-106a0bf83b74"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:39:58 crc kubenswrapper[4792]: I1202 18:39:58.024240 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2745526-4b6e-4ccb-82d9-106a0bf83b74-kube-api-access-pqppw" (OuterVolumeSpecName: "kube-api-access-pqppw") pod "a2745526-4b6e-4ccb-82d9-106a0bf83b74" (UID: "a2745526-4b6e-4ccb-82d9-106a0bf83b74"). InnerVolumeSpecName "kube-api-access-pqppw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:39:58 crc kubenswrapper[4792]: I1202 18:39:58.044377 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a2745526-4b6e-4ccb-82d9-106a0bf83b74-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a2745526-4b6e-4ccb-82d9-106a0bf83b74" (UID: "a2745526-4b6e-4ccb-82d9-106a0bf83b74"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:39:58 crc kubenswrapper[4792]: I1202 18:39:58.086297 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4kkfv" event={"ID":"a2745526-4b6e-4ccb-82d9-106a0bf83b74","Type":"ContainerDied","Data":"7815ec0c73e128ddeb495ac27451518cfc296f7d2af291a8134545d730c9b10e"} Dec 02 18:39:58 crc kubenswrapper[4792]: I1202 18:39:58.086368 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4kkfv" Dec 02 18:39:58 crc kubenswrapper[4792]: I1202 18:39:58.086372 4792 scope.go:117] "RemoveContainer" containerID="86a4cdbb90dd42f8bf8312bca29226ab87ede2d5d2a11ca3ef8910e783d356f7" Dec 02 18:39:58 crc kubenswrapper[4792]: I1202 18:39:58.097600 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2745526-4b6e-4ccb-82d9-106a0bf83b74-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 18:39:58 crc kubenswrapper[4792]: I1202 18:39:58.097652 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2745526-4b6e-4ccb-82d9-106a0bf83b74-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 18:39:58 crc kubenswrapper[4792]: I1202 18:39:58.097669 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pqppw\" (UniqueName: \"kubernetes.io/projected/a2745526-4b6e-4ccb-82d9-106a0bf83b74-kube-api-access-pqppw\") on node \"crc\" DevicePath \"\"" Dec 02 18:39:58 crc kubenswrapper[4792]: I1202 18:39:58.110636 4792 scope.go:117] "RemoveContainer" containerID="fb969ddfbf9165446e8f6ebc850e357edeb9ab796a93c225eb9a7db73b378978" Dec 02 18:39:58 crc kubenswrapper[4792]: I1202 18:39:58.123262 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4kkfv"] Dec 02 18:39:58 crc kubenswrapper[4792]: I1202 18:39:58.126159 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-4kkfv"] Dec 02 18:39:58 crc kubenswrapper[4792]: I1202 18:39:58.148923 4792 scope.go:117] "RemoveContainer" containerID="da5461fc8187e03154fd11b2842d2b9ad060b77d37ba7a748523801a71629168" Dec 02 18:39:58 crc kubenswrapper[4792]: I1202 18:39:58.164810 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qrcm2" Dec 02 18:39:58 crc kubenswrapper[4792]: I1202 18:39:58.300230 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ecd7ddf-ad6e-4f71-8d80-5626f4830547-catalog-content\") pod \"0ecd7ddf-ad6e-4f71-8d80-5626f4830547\" (UID: \"0ecd7ddf-ad6e-4f71-8d80-5626f4830547\") " Dec 02 18:39:58 crc kubenswrapper[4792]: I1202 18:39:58.300337 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ecd7ddf-ad6e-4f71-8d80-5626f4830547-utilities\") pod \"0ecd7ddf-ad6e-4f71-8d80-5626f4830547\" (UID: \"0ecd7ddf-ad6e-4f71-8d80-5626f4830547\") " Dec 02 18:39:58 crc kubenswrapper[4792]: I1202 18:39:58.300429 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-27prc\" (UniqueName: \"kubernetes.io/projected/0ecd7ddf-ad6e-4f71-8d80-5626f4830547-kube-api-access-27prc\") pod \"0ecd7ddf-ad6e-4f71-8d80-5626f4830547\" (UID: \"0ecd7ddf-ad6e-4f71-8d80-5626f4830547\") " Dec 02 18:39:58 crc kubenswrapper[4792]: I1202 18:39:58.302093 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ecd7ddf-ad6e-4f71-8d80-5626f4830547-utilities" (OuterVolumeSpecName: "utilities") pod "0ecd7ddf-ad6e-4f71-8d80-5626f4830547" (UID: "0ecd7ddf-ad6e-4f71-8d80-5626f4830547"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:39:58 crc kubenswrapper[4792]: I1202 18:39:58.307979 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ecd7ddf-ad6e-4f71-8d80-5626f4830547-kube-api-access-27prc" (OuterVolumeSpecName: "kube-api-access-27prc") pod "0ecd7ddf-ad6e-4f71-8d80-5626f4830547" (UID: "0ecd7ddf-ad6e-4f71-8d80-5626f4830547"). InnerVolumeSpecName "kube-api-access-27prc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:39:58 crc kubenswrapper[4792]: I1202 18:39:58.369790 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ecd7ddf-ad6e-4f71-8d80-5626f4830547-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0ecd7ddf-ad6e-4f71-8d80-5626f4830547" (UID: "0ecd7ddf-ad6e-4f71-8d80-5626f4830547"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:39:58 crc kubenswrapper[4792]: I1202 18:39:58.402349 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ecd7ddf-ad6e-4f71-8d80-5626f4830547-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 18:39:58 crc kubenswrapper[4792]: I1202 18:39:58.402401 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ecd7ddf-ad6e-4f71-8d80-5626f4830547-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 18:39:58 crc kubenswrapper[4792]: I1202 18:39:58.402412 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-27prc\" (UniqueName: \"kubernetes.io/projected/0ecd7ddf-ad6e-4f71-8d80-5626f4830547-kube-api-access-27prc\") on node \"crc\" DevicePath \"\"" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.099825 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qrcm2" event={"ID":"0ecd7ddf-ad6e-4f71-8d80-5626f4830547","Type":"ContainerDied","Data":"cbd9fb99d7ae0b1c9605a22aa996710599309566b44a950c6cbee5275a5199e4"} Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.099912 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-qrcm2" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.102672 4792 scope.go:117] "RemoveContainer" containerID="19cb41c332986cd9fdfa35e7cb45e88f2e83396d8cfbcb934e2a78328824c0dc" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.128885 4792 scope.go:117] "RemoveContainer" containerID="da94c2647f7c9d4e65451daf83c2dad0b87f94a91a3d9a429d0f3a99758f73ee" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.156811 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qrcm2"] Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.162499 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-qrcm2"] Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.172949 4792 scope.go:117] "RemoveContainer" containerID="5ada913068d4c5a7f20c63b435cd5ca40f0939da6fbc2635a605807efad16db9" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.184720 4792 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 02 18:39:59 crc kubenswrapper[4792]: E1202 18:39:59.185080 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2745526-4b6e-4ccb-82d9-106a0bf83b74" containerName="registry-server" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.185102 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2745526-4b6e-4ccb-82d9-106a0bf83b74" containerName="registry-server" Dec 02 18:39:59 crc kubenswrapper[4792]: E1202 18:39:59.185115 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2745526-4b6e-4ccb-82d9-106a0bf83b74" containerName="extract-utilities" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.185140 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2745526-4b6e-4ccb-82d9-106a0bf83b74" containerName="extract-utilities" Dec 02 18:39:59 crc kubenswrapper[4792]: E1202 18:39:59.185152 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2745526-4b6e-4ccb-82d9-106a0bf83b74" containerName="extract-content" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.185161 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2745526-4b6e-4ccb-82d9-106a0bf83b74" containerName="extract-content" Dec 02 18:39:59 crc kubenswrapper[4792]: E1202 18:39:59.185175 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ecd7ddf-ad6e-4f71-8d80-5626f4830547" containerName="extract-content" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.185182 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ecd7ddf-ad6e-4f71-8d80-5626f4830547" containerName="extract-content" Dec 02 18:39:59 crc kubenswrapper[4792]: E1202 18:39:59.185191 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ecd7ddf-ad6e-4f71-8d80-5626f4830547" containerName="registry-server" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.185198 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ecd7ddf-ad6e-4f71-8d80-5626f4830547" containerName="registry-server" Dec 02 18:39:59 crc kubenswrapper[4792]: E1202 18:39:59.185224 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a691800f-1f6a-4e86-b97a-73d8181b39d5" containerName="registry-server" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.185230 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="a691800f-1f6a-4e86-b97a-73d8181b39d5" containerName="registry-server" Dec 02 18:39:59 crc 
kubenswrapper[4792]: E1202 18:39:59.185240 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ecd7ddf-ad6e-4f71-8d80-5626f4830547" containerName="extract-utilities" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.185246 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ecd7ddf-ad6e-4f71-8d80-5626f4830547" containerName="extract-utilities" Dec 02 18:39:59 crc kubenswrapper[4792]: E1202 18:39:59.185256 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a691800f-1f6a-4e86-b97a-73d8181b39d5" containerName="extract-content" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.185263 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="a691800f-1f6a-4e86-b97a-73d8181b39d5" containerName="extract-content" Dec 02 18:39:59 crc kubenswrapper[4792]: E1202 18:39:59.185271 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a691800f-1f6a-4e86-b97a-73d8181b39d5" containerName="extract-utilities" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.185278 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="a691800f-1f6a-4e86-b97a-73d8181b39d5" containerName="extract-utilities" Dec 02 18:39:59 crc kubenswrapper[4792]: E1202 18:39:59.185305 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9a3ff99-516a-436e-920e-ef6eca8878e1" containerName="pruner" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.185311 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9a3ff99-516a-436e-920e-ef6eca8878e1" containerName="pruner" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.185477 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2745526-4b6e-4ccb-82d9-106a0bf83b74" containerName="registry-server" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.185492 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ecd7ddf-ad6e-4f71-8d80-5626f4830547" containerName="registry-server" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.185501 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9a3ff99-516a-436e-920e-ef6eca8878e1" containerName="pruner" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.185510 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="a691800f-1f6a-4e86-b97a-73d8181b39d5" containerName="registry-server" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.186103 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.187076 4792 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.187678 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8" gracePeriod=15 Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.187696 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://2153e3d5a1b277988329aaab097b2fdb9955c4d79382b08ff93827ad69b629a1" gracePeriod=15 Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.187886 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad" gracePeriod=15 Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.187912 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f" gracePeriod=15 Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.187951 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee" gracePeriod=15 Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.188781 4792 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 02 18:39:59 crc kubenswrapper[4792]: E1202 18:39:59.189157 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.189191 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 02 18:39:59 crc kubenswrapper[4792]: E1202 18:39:59.189226 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.189241 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 02 18:39:59 crc kubenswrapper[4792]: E1202 18:39:59.189261 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.189274 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" 
containerName="kube-apiserver-check-endpoints" Dec 02 18:39:59 crc kubenswrapper[4792]: E1202 18:39:59.189298 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.189311 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 02 18:39:59 crc kubenswrapper[4792]: E1202 18:39:59.189329 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.189341 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 02 18:39:59 crc kubenswrapper[4792]: E1202 18:39:59.189370 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.189390 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.189671 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.189702 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.189728 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.189747 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.189766 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.189784 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 02 18:39:59 crc kubenswrapper[4792]: E1202 18:39:59.190061 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.190082 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 02 18:39:59 crc kubenswrapper[4792]: E1202 18:39:59.254583 4792 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.188:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.316809 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod 
\"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.316983 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.317045 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.317132 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.317225 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.317271 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.317392 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.317425 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.418439 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.418491 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.418505 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.418610 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.418639 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.418667 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.418682 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.418729 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.418805 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.418847 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.418867 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: 
\"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.418887 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.418911 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.418933 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.418957 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.418977 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.544121 4792 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.547572 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0ecd7ddf-ad6e-4f71-8d80-5626f4830547" path="/var/lib/kubelet/pods/0ecd7ddf-ad6e-4f71-8d80-5626f4830547/volumes" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.548883 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2745526-4b6e-4ccb-82d9-106a0bf83b74" path="/var/lib/kubelet/pods/a2745526-4b6e-4ccb-82d9-106a0bf83b74/volumes" Dec 02 18:39:59 crc kubenswrapper[4792]: I1202 18:39:59.555579 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 18:39:59 crc kubenswrapper[4792]: W1202 18:39:59.601296 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-62ad694ae71950f07166a3f23659107a687b49c83da1332dc0e3746cab9ef32f WatchSource:0}: Error finding container 62ad694ae71950f07166a3f23659107a687b49c83da1332dc0e3746cab9ef32f: Status 404 returned error can't find the container with id 62ad694ae71950f07166a3f23659107a687b49c83da1332dc0e3746cab9ef32f Dec 02 18:39:59 crc kubenswrapper[4792]: E1202 18:39:59.606608 4792 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.188:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187d7a070753de9e openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-02 18:39:59.605399198 +0000 UTC m=+230.378291526,LastTimestamp:2025-12-02 18:39:59.605399198 +0000 UTC m=+230.378291526,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 02 18:40:00 crc kubenswrapper[4792]: I1202 18:40:00.117793 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"62ad694ae71950f07166a3f23659107a687b49c83da1332dc0e3746cab9ef32f"} Dec 02 18:40:00 crc kubenswrapper[4792]: I1202 18:40:00.123580 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 02 18:40:00 crc kubenswrapper[4792]: I1202 18:40:00.125800 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 02 18:40:00 crc kubenswrapper[4792]: I1202 18:40:00.127258 4792 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f" exitCode=2 Dec 02 18:40:00 crc kubenswrapper[4792]: I1202 18:40:00.132059 4792 generic.go:334] "Generic (PLEG): container finished" podID="8bad1c4d-9724-410d-8651-0a8c7c1d92b3" containerID="e7c045ea11be22a796ab65a9552622fae5b599cd82c078898e4286a4bcc4ff8c" exitCode=0 Dec 02 18:40:00 crc kubenswrapper[4792]: I1202 18:40:00.132100 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x7tgb" event={"ID":"8bad1c4d-9724-410d-8651-0a8c7c1d92b3","Type":"ContainerDied","Data":"e7c045ea11be22a796ab65a9552622fae5b599cd82c078898e4286a4bcc4ff8c"} Dec 02 18:40:00 crc kubenswrapper[4792]: I1202 18:40:00.272730 4792 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-hgblz" podUID="04246b55-2809-4c64-abaf-9bed254d0e80" containerName="oauth-openshift" containerID="cri-o://270aab60c131fe1e3ec3b3fd138f3db266cb2cc4f44f6de05f377affd8afd305" gracePeriod=15 Dec 02 18:40:01 crc kubenswrapper[4792]: I1202 18:40:01.143375 4792 generic.go:334] "Generic (PLEG): container finished" podID="fd6bd65f-0f46-42b5-b74a-d62fa8544aaa" containerID="1b7be409daf3b735598ca02b3ad00599eade4a6df610df0259b26a8a3515cc09" exitCode=0 Dec 02 18:40:01 crc kubenswrapper[4792]: I1202 18:40:01.143453 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"fd6bd65f-0f46-42b5-b74a-d62fa8544aaa","Type":"ContainerDied","Data":"1b7be409daf3b735598ca02b3ad00599eade4a6df610df0259b26a8a3515cc09"} Dec 02 18:40:01 crc kubenswrapper[4792]: I1202 18:40:01.144702 4792 status_manager.go:851] "Failed to get status for pod" podUID="fd6bd65f-0f46-42b5-b74a-d62fa8544aaa" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.151505 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.155643 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.156818 4792 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="2153e3d5a1b277988329aaab097b2fdb9955c4d79382b08ff93827ad69b629a1" exitCode=0 Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.156946 4792 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee" exitCode=0 Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.157036 4792 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad" exitCode=0 Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.157112 4792 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8" exitCode=0 Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.157267 4792 scope.go:117] "RemoveContainer" containerID="68658f533c4901de61fe9842e2cec731a1fe967cf911765ac56e57adec6ca810" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.160326 4792 generic.go:334] "Generic (PLEG): container finished" podID="04246b55-2809-4c64-abaf-9bed254d0e80" containerID="270aab60c131fe1e3ec3b3fd138f3db266cb2cc4f44f6de05f377affd8afd305" exitCode=0 Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.160427 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-hgblz" event={"ID":"04246b55-2809-4c64-abaf-9bed254d0e80","Type":"ContainerDied","Data":"270aab60c131fe1e3ec3b3fd138f3db266cb2cc4f44f6de05f377affd8afd305"} Dec 02 18:40:02 crc 
kubenswrapper[4792]: I1202 18:40:02.162276 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"3fd61ba757b29b433e5dacfbc7ff03e926e40ed37746170ced3125062d016446"} Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.162995 4792 status_manager.go:851] "Failed to get status for pod" podUID="fd6bd65f-0f46-42b5-b74a-d62fa8544aaa" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:02 crc kubenswrapper[4792]: E1202 18:40:02.163142 4792 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.188:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 18:40:02 crc kubenswrapper[4792]: E1202 18:40:02.187857 4792 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod04246b55_2809_4c64_abaf_9bed254d0e80.slice/crio-conmon-270aab60c131fe1e3ec3b3fd138f3db266cb2cc4f44f6de05f377affd8afd305.scope\": RecentStats: unable to find data in memory cache]" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.402082 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-x7tgb" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.402656 4792 status_manager.go:851] "Failed to get status for pod" podUID="8bad1c4d-9724-410d-8651-0a8c7c1d92b3" pod="openshift-marketplace/redhat-operators-x7tgb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-x7tgb\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.403232 4792 status_manager.go:851] "Failed to get status for pod" podUID="fd6bd65f-0f46-42b5-b74a-d62fa8544aaa" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.452999 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.453946 4792 status_manager.go:851] "Failed to get status for pod" podUID="8bad1c4d-9724-410d-8651-0a8c7c1d92b3" pod="openshift-marketplace/redhat-operators-x7tgb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-x7tgb\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.454266 4792 status_manager.go:851] "Failed to get status for pod" podUID="fd6bd65f-0f46-42b5-b74a-d62fa8544aaa" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:02 crc kubenswrapper[4792]: E1202 18:40:02.490849 4792 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:02 crc kubenswrapper[4792]: E1202 18:40:02.491314 4792 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:02 crc kubenswrapper[4792]: E1202 18:40:02.491810 4792 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:02 crc kubenswrapper[4792]: E1202 18:40:02.492038 4792 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:02 crc kubenswrapper[4792]: E1202 18:40:02.492245 4792 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.492275 4792 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Dec 02 18:40:02 crc kubenswrapper[4792]: E1202 18:40:02.492460 4792 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.188:6443: connect: connection refused" interval="200ms" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.568962 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-hgblz" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.569743 4792 status_manager.go:851] "Failed to get status for pod" podUID="fd6bd65f-0f46-42b5-b74a-d62fa8544aaa" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.570026 4792 status_manager.go:851] "Failed to get status for pod" podUID="8bad1c4d-9724-410d-8651-0a8c7c1d92b3" pod="openshift-marketplace/redhat-operators-x7tgb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-x7tgb\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.570309 4792 status_manager.go:851] "Failed to get status for pod" podUID="04246b55-2809-4c64-abaf-9bed254d0e80" pod="openshift-authentication/oauth-openshift-558db77b4-hgblz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-hgblz\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.570600 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/fd6bd65f-0f46-42b5-b74a-d62fa8544aaa-kubelet-dir\") pod \"fd6bd65f-0f46-42b5-b74a-d62fa8544aaa\" (UID: \"fd6bd65f-0f46-42b5-b74a-d62fa8544aaa\") " Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.570702 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fd6bd65f-0f46-42b5-b74a-d62fa8544aaa-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "fd6bd65f-0f46-42b5-b74a-d62fa8544aaa" (UID: "fd6bd65f-0f46-42b5-b74a-d62fa8544aaa"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.570737 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8bad1c4d-9724-410d-8651-0a8c7c1d92b3-catalog-content\") pod \"8bad1c4d-9724-410d-8651-0a8c7c1d92b3\" (UID: \"8bad1c4d-9724-410d-8651-0a8c7c1d92b3\") " Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.570891 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fd6bd65f-0f46-42b5-b74a-d62fa8544aaa-kube-api-access\") pod \"fd6bd65f-0f46-42b5-b74a-d62fa8544aaa\" (UID: \"fd6bd65f-0f46-42b5-b74a-d62fa8544aaa\") " Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.570931 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vrh79\" (UniqueName: \"kubernetes.io/projected/8bad1c4d-9724-410d-8651-0a8c7c1d92b3-kube-api-access-vrh79\") pod \"8bad1c4d-9724-410d-8651-0a8c7c1d92b3\" (UID: \"8bad1c4d-9724-410d-8651-0a8c7c1d92b3\") " Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.570968 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/fd6bd65f-0f46-42b5-b74a-d62fa8544aaa-var-lock\") pod \"fd6bd65f-0f46-42b5-b74a-d62fa8544aaa\" (UID: \"fd6bd65f-0f46-42b5-b74a-d62fa8544aaa\") " Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.571001 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8bad1c4d-9724-410d-8651-0a8c7c1d92b3-utilities\") pod \"8bad1c4d-9724-410d-8651-0a8c7c1d92b3\" (UID: \"8bad1c4d-9724-410d-8651-0a8c7c1d92b3\") " Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.571127 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fd6bd65f-0f46-42b5-b74a-d62fa8544aaa-var-lock" (OuterVolumeSpecName: "var-lock") pod "fd6bd65f-0f46-42b5-b74a-d62fa8544aaa" (UID: "fd6bd65f-0f46-42b5-b74a-d62fa8544aaa"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.571937 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8bad1c4d-9724-410d-8651-0a8c7c1d92b3-utilities" (OuterVolumeSpecName: "utilities") pod "8bad1c4d-9724-410d-8651-0a8c7c1d92b3" (UID: "8bad1c4d-9724-410d-8651-0a8c7c1d92b3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.571873 4792 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/fd6bd65f-0f46-42b5-b74a-d62fa8544aaa-var-lock\") on node \"crc\" DevicePath \"\"" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.572666 4792 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/fd6bd65f-0f46-42b5-b74a-d62fa8544aaa-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.579034 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd6bd65f-0f46-42b5-b74a-d62fa8544aaa-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "fd6bd65f-0f46-42b5-b74a-d62fa8544aaa" (UID: "fd6bd65f-0f46-42b5-b74a-d62fa8544aaa"). 
InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.579162 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8bad1c4d-9724-410d-8651-0a8c7c1d92b3-kube-api-access-vrh79" (OuterVolumeSpecName: "kube-api-access-vrh79") pod "8bad1c4d-9724-410d-8651-0a8c7c1d92b3" (UID: "8bad1c4d-9724-410d-8651-0a8c7c1d92b3"). InnerVolumeSpecName "kube-api-access-vrh79". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.673512 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-user-template-login\") pod \"04246b55-2809-4c64-abaf-9bed254d0e80\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.673582 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-user-template-error\") pod \"04246b55-2809-4c64-abaf-9bed254d0e80\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.673648 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cg555\" (UniqueName: \"kubernetes.io/projected/04246b55-2809-4c64-abaf-9bed254d0e80-kube-api-access-cg555\") pod \"04246b55-2809-4c64-abaf-9bed254d0e80\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.673687 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/04246b55-2809-4c64-abaf-9bed254d0e80-audit-dir\") pod \"04246b55-2809-4c64-abaf-9bed254d0e80\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.673768 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-system-serving-cert\") pod \"04246b55-2809-4c64-abaf-9bed254d0e80\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.673787 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-system-trusted-ca-bundle\") pod \"04246b55-2809-4c64-abaf-9bed254d0e80\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.673811 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-system-router-certs\") pod \"04246b55-2809-4c64-abaf-9bed254d0e80\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.673841 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/04246b55-2809-4c64-abaf-9bed254d0e80-audit-policies\") pod \"04246b55-2809-4c64-abaf-9bed254d0e80\" (UID: 
\"04246b55-2809-4c64-abaf-9bed254d0e80\") " Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.673871 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-system-cliconfig\") pod \"04246b55-2809-4c64-abaf-9bed254d0e80\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.673890 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-system-session\") pod \"04246b55-2809-4c64-abaf-9bed254d0e80\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.673940 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-system-ocp-branding-template\") pod \"04246b55-2809-4c64-abaf-9bed254d0e80\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.673969 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-system-service-ca\") pod \"04246b55-2809-4c64-abaf-9bed254d0e80\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.674017 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-user-template-provider-selection\") pod \"04246b55-2809-4c64-abaf-9bed254d0e80\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.674055 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-user-idp-0-file-data\") pod \"04246b55-2809-4c64-abaf-9bed254d0e80\" (UID: \"04246b55-2809-4c64-abaf-9bed254d0e80\") " Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.674445 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fd6bd65f-0f46-42b5-b74a-d62fa8544aaa-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.674466 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vrh79\" (UniqueName: \"kubernetes.io/projected/8bad1c4d-9724-410d-8651-0a8c7c1d92b3-kube-api-access-vrh79\") on node \"crc\" DevicePath \"\"" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.674477 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8bad1c4d-9724-410d-8651-0a8c7c1d92b3-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.675721 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/04246b55-2809-4c64-abaf-9bed254d0e80-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "04246b55-2809-4c64-abaf-9bed254d0e80" (UID: "04246b55-2809-4c64-abaf-9bed254d0e80"). 
InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.676395 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "04246b55-2809-4c64-abaf-9bed254d0e80" (UID: "04246b55-2809-4c64-abaf-9bed254d0e80"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.676386 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "04246b55-2809-4c64-abaf-9bed254d0e80" (UID: "04246b55-2809-4c64-abaf-9bed254d0e80"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.676439 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "04246b55-2809-4c64-abaf-9bed254d0e80" (UID: "04246b55-2809-4c64-abaf-9bed254d0e80"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.677151 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/04246b55-2809-4c64-abaf-9bed254d0e80-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "04246b55-2809-4c64-abaf-9bed254d0e80" (UID: "04246b55-2809-4c64-abaf-9bed254d0e80"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.681058 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "04246b55-2809-4c64-abaf-9bed254d0e80" (UID: "04246b55-2809-4c64-abaf-9bed254d0e80"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.681338 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "04246b55-2809-4c64-abaf-9bed254d0e80" (UID: "04246b55-2809-4c64-abaf-9bed254d0e80"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.681663 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "04246b55-2809-4c64-abaf-9bed254d0e80" (UID: "04246b55-2809-4c64-abaf-9bed254d0e80"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.681885 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "04246b55-2809-4c64-abaf-9bed254d0e80" (UID: "04246b55-2809-4c64-abaf-9bed254d0e80"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.682806 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04246b55-2809-4c64-abaf-9bed254d0e80-kube-api-access-cg555" (OuterVolumeSpecName: "kube-api-access-cg555") pod "04246b55-2809-4c64-abaf-9bed254d0e80" (UID: "04246b55-2809-4c64-abaf-9bed254d0e80"). InnerVolumeSpecName "kube-api-access-cg555". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.682820 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "04246b55-2809-4c64-abaf-9bed254d0e80" (UID: "04246b55-2809-4c64-abaf-9bed254d0e80"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.683012 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "04246b55-2809-4c64-abaf-9bed254d0e80" (UID: "04246b55-2809-4c64-abaf-9bed254d0e80"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.683371 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "04246b55-2809-4c64-abaf-9bed254d0e80" (UID: "04246b55-2809-4c64-abaf-9bed254d0e80"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.688357 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "04246b55-2809-4c64-abaf-9bed254d0e80" (UID: "04246b55-2809-4c64-abaf-9bed254d0e80"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:40:02 crc kubenswrapper[4792]: E1202 18:40:02.694427 4792 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.188:6443: connect: connection refused" interval="400ms" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.695821 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8bad1c4d-9724-410d-8651-0a8c7c1d92b3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8bad1c4d-9724-410d-8651-0a8c7c1d92b3" (UID: "8bad1c4d-9724-410d-8651-0a8c7c1d92b3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.776211 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.776264 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.776278 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.776289 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.776302 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cg555\" (UniqueName: \"kubernetes.io/projected/04246b55-2809-4c64-abaf-9bed254d0e80-kube-api-access-cg555\") on node \"crc\" DevicePath \"\"" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.776316 4792 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/04246b55-2809-4c64-abaf-9bed254d0e80-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.776329 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.776342 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.776354 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 
18:40:02.776364 4792 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/04246b55-2809-4c64-abaf-9bed254d0e80-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.776374 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.776386 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.776396 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8bad1c4d-9724-410d-8651-0a8c7c1d92b3-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.776408 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 02 18:40:02 crc kubenswrapper[4792]: I1202 18:40:02.776418 4792 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/04246b55-2809-4c64-abaf-9bed254d0e80-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 02 18:40:03 crc kubenswrapper[4792]: E1202 18:40:03.095985 4792 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.188:6443: connect: connection refused" interval="800ms" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.109335 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.110606 4792 util.go:48] "No ready sandbox for pod can be found. 
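
The "Failed to ensure lease exists, will retry" errors here, and the later ones at 18:40:03.897, 18:40:05.498 and 18:40:08.701, show the node-lease controller doubling its retry interval while the API server is down: 400ms, 800ms, 1.6s, 3.2s, 6.4s. Below is a minimal sketch of that capped exponential backoff, assuming a stand-in ensureLease that always fails the way the API server does here; it is not the kubelet's actual controller.go.

    package main

    import (
        "errors"
        "fmt"
        "time"
    )

    // ensureLease stands in for the request against
    // .../kube-node-lease/leases/crc that keeps failing in the log.
    func ensureLease() error {
        return errors.New("dial tcp 38.102.83.188:6443: connect: connection refused")
    }

    func main() {
        interval := 400 * time.Millisecond
        const maxInterval = 7 * time.Second
        for attempt := 0; attempt < 5; attempt++ {
            if err := ensureLease(); err == nil {
                return
            } else {
                fmt.Printf("Failed to ensure lease exists, will retry err=%q interval=%q\n", err, interval)
            }
            // The real controller waits for `interval` here; the sleep is
            // omitted so the sketch runs instantly.
            interval *= 2
            if interval > maxInterval {
                interval = maxInterval
            }
        }
    }

Capping the interval keeps the node from backing off indefinitely, so the lease is reacquired quickly once the API server returns.
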
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.111467 4792 status_manager.go:851] "Failed to get status for pod" podUID="8bad1c4d-9724-410d-8651-0a8c7c1d92b3" pod="openshift-marketplace/redhat-operators-x7tgb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-x7tgb\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.111956 4792 status_manager.go:851] "Failed to get status for pod" podUID="04246b55-2809-4c64-abaf-9bed254d0e80" pod="openshift-authentication/oauth-openshift-558db77b4-hgblz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-hgblz\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.112652 4792 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.113070 4792 status_manager.go:851] "Failed to get status for pod" podUID="fd6bd65f-0f46-42b5-b74a-d62fa8544aaa" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.184808 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.186303 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.186334 4792 scope.go:117] "RemoveContainer" containerID="2153e3d5a1b277988329aaab097b2fdb9955c4d79382b08ff93827ad69b629a1" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.188315 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"fd6bd65f-0f46-42b5-b74a-d62fa8544aaa","Type":"ContainerDied","Data":"47e292430a75129f37710ff713ebf0d2bcdd3679ea6cf2cb6514218516d972eb"} Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.188358 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="47e292430a75129f37710ff713ebf0d2bcdd3679ea6cf2cb6514218516d972eb" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.188407 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.190306 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-hgblz" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.190306 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-hgblz" event={"ID":"04246b55-2809-4c64-abaf-9bed254d0e80","Type":"ContainerDied","Data":"0ce3005a6024188ace08dceaad20e852456465456d985e4cbec333f57937db8e"} Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.191368 4792 status_manager.go:851] "Failed to get status for pod" podUID="fd6bd65f-0f46-42b5-b74a-d62fa8544aaa" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.192341 4792 status_manager.go:851] "Failed to get status for pod" podUID="8bad1c4d-9724-410d-8651-0a8c7c1d92b3" pod="openshift-marketplace/redhat-operators-x7tgb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-x7tgb\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.192670 4792 status_manager.go:851] "Failed to get status for pod" podUID="04246b55-2809-4c64-abaf-9bed254d0e80" pod="openshift-authentication/oauth-openshift-558db77b4-hgblz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-hgblz\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.193035 4792 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.200165 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-x7tgb" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.200205 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x7tgb" event={"ID":"8bad1c4d-9724-410d-8651-0a8c7c1d92b3","Type":"ContainerDied","Data":"364c5f70b78544589c3ca3ce547b2821f73aa738f03d5269673a6bcc1b89992e"} Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.201004 4792 status_manager.go:851] "Failed to get status for pod" podUID="8bad1c4d-9724-410d-8651-0a8c7c1d92b3" pod="openshift-marketplace/redhat-operators-x7tgb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-x7tgb\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:03 crc kubenswrapper[4792]: E1202 18:40:03.201498 4792 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.188:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.201839 4792 status_manager.go:851] "Failed to get status for pod" podUID="04246b55-2809-4c64-abaf-9bed254d0e80" pod="openshift-authentication/oauth-openshift-558db77b4-hgblz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-hgblz\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.202385 4792 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.202708 4792 status_manager.go:851] "Failed to get status for pod" podUID="fd6bd65f-0f46-42b5-b74a-d62fa8544aaa" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.209643 4792 scope.go:117] "RemoveContainer" containerID="3a32905e02f1381c6219befa68c0ad8299c4227f81dd2a5ddb46c9602a0ca9ee" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.212375 4792 status_manager.go:851] "Failed to get status for pod" podUID="04246b55-2809-4c64-abaf-9bed254d0e80" pod="openshift-authentication/oauth-openshift-558db77b4-hgblz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-hgblz\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.213300 4792 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.214030 4792 status_manager.go:851] "Failed to get status for pod" podUID="fd6bd65f-0f46-42b5-b74a-d62fa8544aaa" pod="openshift-kube-apiserver/installer-9-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.214369 4792 status_manager.go:851] "Failed to get status for pod" podUID="8bad1c4d-9724-410d-8651-0a8c7c1d92b3" pod="openshift-marketplace/redhat-operators-x7tgb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-x7tgb\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.214730 4792 status_manager.go:851] "Failed to get status for pod" podUID="04246b55-2809-4c64-abaf-9bed254d0e80" pod="openshift-authentication/oauth-openshift-558db77b4-hgblz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-hgblz\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.215652 4792 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.215937 4792 status_manager.go:851] "Failed to get status for pod" podUID="fd6bd65f-0f46-42b5-b74a-d62fa8544aaa" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.216148 4792 status_manager.go:851] "Failed to get status for pod" podUID="8bad1c4d-9724-410d-8651-0a8c7c1d92b3" pod="openshift-marketplace/redhat-operators-x7tgb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-x7tgb\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.218446 4792 status_manager.go:851] "Failed to get status for pod" podUID="8bad1c4d-9724-410d-8651-0a8c7c1d92b3" pod="openshift-marketplace/redhat-operators-x7tgb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-x7tgb\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.218634 4792 status_manager.go:851] "Failed to get status for pod" podUID="04246b55-2809-4c64-abaf-9bed254d0e80" pod="openshift-authentication/oauth-openshift-558db77b4-hgblz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-hgblz\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.218807 4792 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.218973 4792 status_manager.go:851] "Failed to get status for pod" podUID="fd6bd65f-0f46-42b5-b74a-d62fa8544aaa" pod="openshift-kube-apiserver/installer-9-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.226127 4792 scope.go:117] "RemoveContainer" containerID="42f77661dcf130fe01d987036874c88b7dc13fbb8cfb9fc8b80a67b700db35ad" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.238790 4792 scope.go:117] "RemoveContainer" containerID="347659c5e446ceedca48f408f524fecace4a5273276847a6dd204a139064239f" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.251259 4792 scope.go:117] "RemoveContainer" containerID="0640000cf1f9813279df416a0e3ecbe3f95b89897236a71623cd6f6f953189d8" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.270150 4792 scope.go:117] "RemoveContainer" containerID="f3d23c935875438b54d4a87750ae8ddc64b8e12368ba56b4426a3a42bcb26e8c" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.284826 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.284972 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.284995 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.285300 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.285338 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.285356 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.289917 4792 scope.go:117] "RemoveContainer" containerID="270aab60c131fe1e3ec3b3fd138f3db266cb2cc4f44f6de05f377affd8afd305" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.309488 4792 scope.go:117] "RemoveContainer" containerID="e7c045ea11be22a796ab65a9552622fae5b599cd82c078898e4286a4bcc4ff8c" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.325945 4792 scope.go:117] "RemoveContainer" containerID="b5b6776e7e3ac343d3080854851e2b98ae793a898fbcbd24810d5b36b7d69434" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.346251 4792 scope.go:117] "RemoveContainer" containerID="482a4d0ec46f163b9400f7bd7b2a125a9fe817f61065558a7d7b677b065b127c" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.386812 4792 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.386864 4792 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.386879 4792 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.530611 4792 status_manager.go:851] "Failed to get status for pod" podUID="fd6bd65f-0f46-42b5-b74a-d62fa8544aaa" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.531388 4792 status_manager.go:851] "Failed to get status for pod" podUID="8bad1c4d-9724-410d-8651-0a8c7c1d92b3" pod="openshift-marketplace/redhat-operators-x7tgb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-x7tgb\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.531982 4792 status_manager.go:851] "Failed to get status for pod" podUID="04246b55-2809-4c64-abaf-9bed254d0e80" pod="openshift-authentication/oauth-openshift-558db77b4-hgblz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-hgblz\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.532486 4792 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:03 crc kubenswrapper[4792]: I1202 18:40:03.551276 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 02 18:40:03 crc kubenswrapper[4792]: E1202 18:40:03.897682 4792 controller.go:145] "Failed to ensure lease exists, will retry" err="Get 
\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.188:6443: connect: connection refused" interval="1.6s" Dec 02 18:40:05 crc kubenswrapper[4792]: E1202 18:40:05.498282 4792 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.188:6443: connect: connection refused" interval="3.2s" Dec 02 18:40:06 crc kubenswrapper[4792]: E1202 18:40:06.787081 4792 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.188:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187d7a070753de9e openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-02 18:39:59.605399198 +0000 UTC m=+230.378291526,LastTimestamp:2025-12-02 18:39:59.605399198 +0000 UTC m=+230.378291526,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 02 18:40:08 crc kubenswrapper[4792]: E1202 18:40:08.701576 4792 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.188:6443: connect: connection refused" interval="6.4s" Dec 02 18:40:09 crc kubenswrapper[4792]: I1202 18:40:09.542091 4792 status_manager.go:851] "Failed to get status for pod" podUID="fd6bd65f-0f46-42b5-b74a-d62fa8544aaa" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:09 crc kubenswrapper[4792]: I1202 18:40:09.542934 4792 status_manager.go:851] "Failed to get status for pod" podUID="8bad1c4d-9724-410d-8651-0a8c7c1d92b3" pod="openshift-marketplace/redhat-operators-x7tgb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-x7tgb\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:09 crc kubenswrapper[4792]: I1202 18:40:09.543387 4792 status_manager.go:851] "Failed to get status for pod" podUID="04246b55-2809-4c64-abaf-9bed254d0e80" pod="openshift-authentication/oauth-openshift-558db77b4-hgblz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-hgblz\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:12 crc kubenswrapper[4792]: I1202 18:40:12.279455 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 02 18:40:12 crc kubenswrapper[4792]: I1202 
18:40:12.279831 4792 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded" exitCode=1 Dec 02 18:40:12 crc kubenswrapper[4792]: I1202 18:40:12.279864 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded"} Dec 02 18:40:12 crc kubenswrapper[4792]: I1202 18:40:12.280321 4792 scope.go:117] "RemoveContainer" containerID="a398b4f0f0a1090598c7c4586a9a0881744c84849f361c5cfd1fd8afd2031ded" Dec 02 18:40:12 crc kubenswrapper[4792]: I1202 18:40:12.280972 4792 status_manager.go:851] "Failed to get status for pod" podUID="fd6bd65f-0f46-42b5-b74a-d62fa8544aaa" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:12 crc kubenswrapper[4792]: I1202 18:40:12.281466 4792 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:12 crc kubenswrapper[4792]: I1202 18:40:12.282039 4792 status_manager.go:851] "Failed to get status for pod" podUID="8bad1c4d-9724-410d-8651-0a8c7c1d92b3" pod="openshift-marketplace/redhat-operators-x7tgb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-x7tgb\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:12 crc kubenswrapper[4792]: I1202 18:40:12.282761 4792 status_manager.go:851] "Failed to get status for pod" podUID="04246b55-2809-4c64-abaf-9bed254d0e80" pod="openshift-authentication/oauth-openshift-558db77b4-hgblz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-hgblz\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:12 crc kubenswrapper[4792]: I1202 18:40:12.539157 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 18:40:12 crc kubenswrapper[4792]: I1202 18:40:12.540722 4792 status_manager.go:851] "Failed to get status for pod" podUID="8bad1c4d-9724-410d-8651-0a8c7c1d92b3" pod="openshift-marketplace/redhat-operators-x7tgb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-x7tgb\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:12 crc kubenswrapper[4792]: I1202 18:40:12.541462 4792 status_manager.go:851] "Failed to get status for pod" podUID="04246b55-2809-4c64-abaf-9bed254d0e80" pod="openshift-authentication/oauth-openshift-558db77b4-hgblz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-hgblz\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:12 crc kubenswrapper[4792]: I1202 18:40:12.541923 4792 status_manager.go:851] "Failed to get status for pod" podUID="fd6bd65f-0f46-42b5-b74a-d62fa8544aaa" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:12 crc kubenswrapper[4792]: I1202 18:40:12.542238 4792 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:12 crc kubenswrapper[4792]: I1202 18:40:12.556024 4792 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="9668059a-1772-400b-b2ad-86aa3d306dd9" Dec 02 18:40:12 crc kubenswrapper[4792]: I1202 18:40:12.556065 4792 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="9668059a-1772-400b-b2ad-86aa3d306dd9" Dec 02 18:40:12 crc kubenswrapper[4792]: E1202 18:40:12.556558 4792 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.188:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 18:40:12 crc kubenswrapper[4792]: I1202 18:40:12.557168 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 18:40:12 crc kubenswrapper[4792]: W1202 18:40:12.583101 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-28c7d71dd0dbe19646be522e0eaa12f8295c10bcac815dc72ee4d8f142acbd86 WatchSource:0}: Error finding container 28c7d71dd0dbe19646be522e0eaa12f8295c10bcac815dc72ee4d8f142acbd86: Status 404 returned error can't find the container with id 28c7d71dd0dbe19646be522e0eaa12f8295c10bcac815dc72ee4d8f142acbd86 Dec 02 18:40:13 crc kubenswrapper[4792]: I1202 18:40:13.290961 4792 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="bf7076aad81b07aa613cebc192c1f6fe3c04f818ac781197642fbf48926291f9" exitCode=0 Dec 02 18:40:13 crc kubenswrapper[4792]: I1202 18:40:13.291102 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"bf7076aad81b07aa613cebc192c1f6fe3c04f818ac781197642fbf48926291f9"} Dec 02 18:40:13 crc kubenswrapper[4792]: I1202 18:40:13.291384 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"28c7d71dd0dbe19646be522e0eaa12f8295c10bcac815dc72ee4d8f142acbd86"} Dec 02 18:40:13 crc kubenswrapper[4792]: I1202 18:40:13.291675 4792 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="9668059a-1772-400b-b2ad-86aa3d306dd9" Dec 02 18:40:13 crc kubenswrapper[4792]: I1202 18:40:13.291693 4792 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="9668059a-1772-400b-b2ad-86aa3d306dd9" Dec 02 18:40:13 crc kubenswrapper[4792]: E1202 18:40:13.292283 4792 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.188:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 18:40:13 crc kubenswrapper[4792]: I1202 18:40:13.292447 4792 status_manager.go:851] "Failed to get status for pod" podUID="8bad1c4d-9724-410d-8651-0a8c7c1d92b3" pod="openshift-marketplace/redhat-operators-x7tgb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-x7tgb\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:13 crc kubenswrapper[4792]: I1202 18:40:13.292857 4792 status_manager.go:851] "Failed to get status for pod" podUID="04246b55-2809-4c64-abaf-9bed254d0e80" pod="openshift-authentication/oauth-openshift-558db77b4-hgblz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-hgblz\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:13 crc kubenswrapper[4792]: I1202 18:40:13.293499 4792 status_manager.go:851] "Failed to get status for pod" podUID="fd6bd65f-0f46-42b5-b74a-d62fa8544aaa" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:13 crc kubenswrapper[4792]: I1202 18:40:13.294083 4792 status_manager.go:851] "Failed to get 
status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:13 crc kubenswrapper[4792]: I1202 18:40:13.297062 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 02 18:40:13 crc kubenswrapper[4792]: I1202 18:40:13.297111 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"7d0f8ebf62c9746787425afaf0581e5c0e49c7888da122fbab9583f0221d6c28"} Dec 02 18:40:13 crc kubenswrapper[4792]: I1202 18:40:13.298090 4792 status_manager.go:851] "Failed to get status for pod" podUID="04246b55-2809-4c64-abaf-9bed254d0e80" pod="openshift-authentication/oauth-openshift-558db77b4-hgblz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-hgblz\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:13 crc kubenswrapper[4792]: I1202 18:40:13.298870 4792 status_manager.go:851] "Failed to get status for pod" podUID="fd6bd65f-0f46-42b5-b74a-d62fa8544aaa" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:13 crc kubenswrapper[4792]: I1202 18:40:13.299383 4792 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:13 crc kubenswrapper[4792]: I1202 18:40:13.299897 4792 status_manager.go:851] "Failed to get status for pod" podUID="8bad1c4d-9724-410d-8651-0a8c7c1d92b3" pod="openshift-marketplace/redhat-operators-x7tgb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-x7tgb\": dial tcp 38.102.83.188:6443: connect: connection refused" Dec 02 18:40:14 crc kubenswrapper[4792]: I1202 18:40:14.305539 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"2f0d5b2ccdb540b0e6521d47a32e8294d6d99021cd2a679b9ae7d9b02efb5535"} Dec 02 18:40:14 crc kubenswrapper[4792]: I1202 18:40:14.305889 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"d86db962f1871d9f9dc46b36ff73ed843d21b6c961c5f9e3479358e63e2071ba"} Dec 02 18:40:15 crc kubenswrapper[4792]: I1202 18:40:15.314766 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"ea876d09084c842b9b52365b18cb2bb4acf898c26ab6e095de8cc92b1c08c7f9"} Dec 02 18:40:15 crc kubenswrapper[4792]: I1202 18:40:15.314810 4792 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"68210092aaa90b638f43917ab244ff9855c8a6350105327fea9c775c89590d84"} Dec 02 18:40:15 crc kubenswrapper[4792]: I1202 18:40:15.314822 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"f0ad02fcdcfd5e96af4b8daf5ad74a80b883321ba965549927ea80cd0071727e"} Dec 02 18:40:15 crc kubenswrapper[4792]: I1202 18:40:15.315005 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 18:40:15 crc kubenswrapper[4792]: I1202 18:40:15.315236 4792 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="9668059a-1772-400b-b2ad-86aa3d306dd9" Dec 02 18:40:15 crc kubenswrapper[4792]: I1202 18:40:15.315269 4792 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="9668059a-1772-400b-b2ad-86aa3d306dd9" Dec 02 18:40:17 crc kubenswrapper[4792]: I1202 18:40:17.449775 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 18:40:17 crc kubenswrapper[4792]: I1202 18:40:17.557776 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 18:40:17 crc kubenswrapper[4792]: I1202 18:40:17.557815 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 18:40:17 crc kubenswrapper[4792]: I1202 18:40:17.563560 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 18:40:18 crc kubenswrapper[4792]: I1202 18:40:18.540486 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 18:40:18 crc kubenswrapper[4792]: I1202 18:40:18.548792 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 18:40:20 crc kubenswrapper[4792]: I1202 18:40:20.329672 4792 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 18:40:20 crc kubenswrapper[4792]: I1202 18:40:20.442932 4792 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="894b1fae-d851-48fa-ac83-bef575cca4e3" Dec 02 18:40:20 crc kubenswrapper[4792]: I1202 18:40:20.560440 4792 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="9668059a-1772-400b-b2ad-86aa3d306dd9" Dec 02 18:40:20 crc kubenswrapper[4792]: I1202 18:40:20.560471 4792 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="9668059a-1772-400b-b2ad-86aa3d306dd9" Dec 02 18:40:20 crc kubenswrapper[4792]: I1202 18:40:20.572917 4792 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="894b1fae-d851-48fa-ac83-bef575cca4e3" Dec 02 18:40:20 crc kubenswrapper[4792]: I1202 18:40:20.576174 
4792 status_manager.go:308] "Container readiness changed before pod has synced" pod="openshift-kube-apiserver/kube-apiserver-crc" containerID="cri-o://d86db962f1871d9f9dc46b36ff73ed843d21b6c961c5f9e3479358e63e2071ba" Dec 02 18:40:20 crc kubenswrapper[4792]: I1202 18:40:20.576193 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 18:40:21 crc kubenswrapper[4792]: I1202 18:40:21.573420 4792 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="9668059a-1772-400b-b2ad-86aa3d306dd9" Dec 02 18:40:21 crc kubenswrapper[4792]: I1202 18:40:21.573468 4792 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="9668059a-1772-400b-b2ad-86aa3d306dd9" Dec 02 18:40:21 crc kubenswrapper[4792]: I1202 18:40:21.576031 4792 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="894b1fae-d851-48fa-ac83-bef575cca4e3" Dec 02 18:40:27 crc kubenswrapper[4792]: I1202 18:40:27.457587 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 18:40:30 crc kubenswrapper[4792]: I1202 18:40:30.175777 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 02 18:40:30 crc kubenswrapper[4792]: I1202 18:40:30.482318 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 02 18:40:31 crc kubenswrapper[4792]: I1202 18:40:31.141156 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 02 18:40:31 crc kubenswrapper[4792]: I1202 18:40:31.400148 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 02 18:40:31 crc kubenswrapper[4792]: I1202 18:40:31.420729 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 02 18:40:31 crc kubenswrapper[4792]: I1202 18:40:31.434637 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 02 18:40:31 crc kubenswrapper[4792]: I1202 18:40:31.435123 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 02 18:40:31 crc kubenswrapper[4792]: I1202 18:40:31.582871 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 02 18:40:31 crc kubenswrapper[4792]: I1202 18:40:31.757134 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 02 18:40:31 crc kubenswrapper[4792]: I1202 18:40:31.835387 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 02 18:40:32 crc kubenswrapper[4792]: I1202 18:40:32.062561 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 02 18:40:32 crc kubenswrapper[4792]: I1202 18:40:32.157766 4792 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-authentication-operator"/"serving-cert" Dec 02 18:40:32 crc kubenswrapper[4792]: I1202 18:40:32.216801 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 02 18:40:32 crc kubenswrapper[4792]: I1202 18:40:32.298105 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 02 18:40:32 crc kubenswrapper[4792]: I1202 18:40:32.348805 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 02 18:40:32 crc kubenswrapper[4792]: I1202 18:40:32.589816 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 02 18:40:32 crc kubenswrapper[4792]: I1202 18:40:32.639361 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 02 18:40:32 crc kubenswrapper[4792]: I1202 18:40:32.673068 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 02 18:40:32 crc kubenswrapper[4792]: I1202 18:40:32.730799 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 02 18:40:32 crc kubenswrapper[4792]: I1202 18:40:32.736693 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 02 18:40:32 crc kubenswrapper[4792]: I1202 18:40:32.876767 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 02 18:40:32 crc kubenswrapper[4792]: I1202 18:40:32.921114 4792 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 02 18:40:32 crc kubenswrapper[4792]: I1202 18:40:32.924084 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 02 18:40:32 crc kubenswrapper[4792]: I1202 18:40:32.973616 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 02 18:40:32 crc kubenswrapper[4792]: I1202 18:40:32.994917 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 02 18:40:33 crc kubenswrapper[4792]: I1202 18:40:33.169370 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 02 18:40:33 crc kubenswrapper[4792]: I1202 18:40:33.201467 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 02 18:40:33 crc kubenswrapper[4792]: I1202 18:40:33.291340 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 02 18:40:33 crc kubenswrapper[4792]: I1202 18:40:33.457326 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 02 18:40:33 crc kubenswrapper[4792]: I1202 18:40:33.491135 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 02 18:40:33 crc kubenswrapper[4792]: I1202 18:40:33.591034 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 02 18:40:33 crc kubenswrapper[4792]: I1202 18:40:33.767725 4792 reflector.go:368] 
Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 02 18:40:33 crc kubenswrapper[4792]: I1202 18:40:33.948021 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 02 18:40:34 crc kubenswrapper[4792]: I1202 18:40:34.038394 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 02 18:40:34 crc kubenswrapper[4792]: I1202 18:40:34.090673 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 02 18:40:34 crc kubenswrapper[4792]: I1202 18:40:34.100997 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 02 18:40:34 crc kubenswrapper[4792]: I1202 18:40:34.132679 4792 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 02 18:40:34 crc kubenswrapper[4792]: I1202 18:40:34.227085 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 02 18:40:34 crc kubenswrapper[4792]: I1202 18:40:34.279480 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 02 18:40:34 crc kubenswrapper[4792]: I1202 18:40:34.295281 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 02 18:40:34 crc kubenswrapper[4792]: I1202 18:40:34.318587 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 02 18:40:34 crc kubenswrapper[4792]: I1202 18:40:34.343205 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 02 18:40:34 crc kubenswrapper[4792]: I1202 18:40:34.395738 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 02 18:40:34 crc kubenswrapper[4792]: I1202 18:40:34.417105 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 02 18:40:34 crc kubenswrapper[4792]: I1202 18:40:34.424669 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 02 18:40:34 crc kubenswrapper[4792]: I1202 18:40:34.477599 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 02 18:40:34 crc kubenswrapper[4792]: I1202 18:40:34.480940 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 02 18:40:34 crc kubenswrapper[4792]: I1202 18:40:34.541800 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 02 18:40:34 crc kubenswrapper[4792]: I1202 18:40:34.577557 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 02 18:40:34 crc kubenswrapper[4792]: I1202 18:40:34.650572 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 02 18:40:34 crc kubenswrapper[4792]: I1202 
18:40:34.654870 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 02 18:40:34 crc kubenswrapper[4792]: I1202 18:40:34.754344 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 02 18:40:34 crc kubenswrapper[4792]: I1202 18:40:34.792120 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 02 18:40:34 crc kubenswrapper[4792]: I1202 18:40:34.917401 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 02 18:40:35 crc kubenswrapper[4792]: I1202 18:40:35.002086 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 02 18:40:35 crc kubenswrapper[4792]: I1202 18:40:35.066381 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 02 18:40:35 crc kubenswrapper[4792]: I1202 18:40:35.166419 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 02 18:40:35 crc kubenswrapper[4792]: I1202 18:40:35.209089 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 02 18:40:35 crc kubenswrapper[4792]: I1202 18:40:35.254152 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 02 18:40:35 crc kubenswrapper[4792]: I1202 18:40:35.286236 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 02 18:40:35 crc kubenswrapper[4792]: I1202 18:40:35.382776 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 02 18:40:35 crc kubenswrapper[4792]: I1202 18:40:35.394989 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 02 18:40:35 crc kubenswrapper[4792]: I1202 18:40:35.396948 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 02 18:40:35 crc kubenswrapper[4792]: I1202 18:40:35.405560 4792 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 02 18:40:35 crc kubenswrapper[4792]: I1202 18:40:35.417611 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 02 18:40:35 crc kubenswrapper[4792]: I1202 18:40:35.487833 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 02 18:40:35 crc kubenswrapper[4792]: I1202 18:40:35.516733 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 02 18:40:35 crc kubenswrapper[4792]: I1202 18:40:35.544170 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 02 18:40:35 crc kubenswrapper[4792]: I1202 18:40:35.576446 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 02 18:40:35 crc kubenswrapper[4792]: I1202 18:40:35.601959 4792 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 02 18:40:35 crc kubenswrapper[4792]: I1202 18:40:35.788743 4792 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 02 18:40:35 crc kubenswrapper[4792]: I1202 18:40:35.830139 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 02 18:40:35 crc kubenswrapper[4792]: I1202 18:40:35.897624 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 02 18:40:36 crc kubenswrapper[4792]: I1202 18:40:36.058170 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 02 18:40:36 crc kubenswrapper[4792]: I1202 18:40:36.095735 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 02 18:40:36 crc kubenswrapper[4792]: I1202 18:40:36.193703 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 02 18:40:36 crc kubenswrapper[4792]: I1202 18:40:36.251047 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 02 18:40:36 crc kubenswrapper[4792]: I1202 18:40:36.350801 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 02 18:40:36 crc kubenswrapper[4792]: I1202 18:40:36.550789 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 02 18:40:36 crc kubenswrapper[4792]: I1202 18:40:36.599293 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 02 18:40:36 crc kubenswrapper[4792]: I1202 18:40:36.627584 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 02 18:40:36 crc kubenswrapper[4792]: I1202 18:40:36.646870 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 02 18:40:36 crc kubenswrapper[4792]: I1202 18:40:36.666721 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 02 18:40:36 crc kubenswrapper[4792]: I1202 18:40:36.682708 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 02 18:40:36 crc kubenswrapper[4792]: I1202 18:40:36.696769 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 02 18:40:36 crc kubenswrapper[4792]: I1202 18:40:36.831362 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 02 18:40:36 crc kubenswrapper[4792]: I1202 18:40:36.874168 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 02 18:40:36 crc kubenswrapper[4792]: I1202 18:40:36.943706 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 02 18:40:37 crc kubenswrapper[4792]: I1202 18:40:37.017720 4792 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 02 18:40:37 crc kubenswrapper[4792]: I1202 18:40:37.076008 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 02 18:40:37 crc kubenswrapper[4792]: I1202 18:40:37.272480 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 02 18:40:37 crc kubenswrapper[4792]: I1202 18:40:37.294724 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 02 18:40:37 crc kubenswrapper[4792]: I1202 18:40:37.469224 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 02 18:40:37 crc kubenswrapper[4792]: I1202 18:40:37.528494 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 02 18:40:37 crc kubenswrapper[4792]: I1202 18:40:37.661686 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 02 18:40:37 crc kubenswrapper[4792]: I1202 18:40:37.689105 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 02 18:40:37 crc kubenswrapper[4792]: I1202 18:40:37.720755 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 02 18:40:37 crc kubenswrapper[4792]: I1202 18:40:37.758734 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 02 18:40:37 crc kubenswrapper[4792]: I1202 18:40:37.785148 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 02 18:40:37 crc kubenswrapper[4792]: I1202 18:40:37.797617 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 02 18:40:37 crc kubenswrapper[4792]: I1202 18:40:37.854267 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 02 18:40:37 crc kubenswrapper[4792]: I1202 18:40:37.875449 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 02 18:40:37 crc kubenswrapper[4792]: I1202 18:40:37.993489 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 02 18:40:38 crc kubenswrapper[4792]: I1202 18:40:38.053951 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 02 18:40:38 crc kubenswrapper[4792]: I1202 18:40:38.279661 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 02 18:40:38 crc kubenswrapper[4792]: I1202 18:40:38.293465 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 02 18:40:38 crc kubenswrapper[4792]: I1202 18:40:38.323296 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 02 18:40:38 crc kubenswrapper[4792]: I1202 18:40:38.361638 4792 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 02 18:40:38 crc kubenswrapper[4792]: I1202 18:40:38.373741 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 02 18:40:38 crc kubenswrapper[4792]: I1202 18:40:38.410229 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 02 18:40:38 crc kubenswrapper[4792]: I1202 18:40:38.513664 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 02 18:40:38 crc kubenswrapper[4792]: I1202 18:40:38.585202 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 02 18:40:38 crc kubenswrapper[4792]: I1202 18:40:38.601188 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 02 18:40:38 crc kubenswrapper[4792]: I1202 18:40:38.777493 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 02 18:40:38 crc kubenswrapper[4792]: I1202 18:40:38.833331 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 02 18:40:38 crc kubenswrapper[4792]: I1202 18:40:38.842571 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 02 18:40:38 crc kubenswrapper[4792]: I1202 18:40:38.858183 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 02 18:40:38 crc kubenswrapper[4792]: I1202 18:40:38.874245 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 02 18:40:38 crc kubenswrapper[4792]: I1202 18:40:38.875687 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 02 18:40:38 crc kubenswrapper[4792]: I1202 18:40:38.928696 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 02 18:40:39 crc kubenswrapper[4792]: I1202 18:40:39.020292 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 02 18:40:39 crc kubenswrapper[4792]: I1202 18:40:39.031205 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 02 18:40:39 crc kubenswrapper[4792]: I1202 18:40:39.044557 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 02 18:40:39 crc kubenswrapper[4792]: I1202 18:40:39.064886 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 02 18:40:39 crc kubenswrapper[4792]: I1202 18:40:39.096299 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 02 18:40:39 crc kubenswrapper[4792]: I1202 18:40:39.104693 4792 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-image-registry"/"image-registry-tls" Dec 02 18:40:39 crc kubenswrapper[4792]: I1202 18:40:39.148266 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 02 18:40:39 crc kubenswrapper[4792]: I1202 18:40:39.154944 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 02 18:40:39 crc kubenswrapper[4792]: I1202 18:40:39.338433 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 02 18:40:39 crc kubenswrapper[4792]: I1202 18:40:39.409192 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 02 18:40:39 crc kubenswrapper[4792]: I1202 18:40:39.438100 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 02 18:40:39 crc kubenswrapper[4792]: I1202 18:40:39.571298 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 02 18:40:39 crc kubenswrapper[4792]: I1202 18:40:39.583651 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 02 18:40:39 crc kubenswrapper[4792]: I1202 18:40:39.607656 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 02 18:40:39 crc kubenswrapper[4792]: I1202 18:40:39.652735 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 02 18:40:39 crc kubenswrapper[4792]: I1202 18:40:39.702240 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 02 18:40:39 crc kubenswrapper[4792]: I1202 18:40:39.716484 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 02 18:40:39 crc kubenswrapper[4792]: I1202 18:40:39.740015 4792 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 02 18:40:39 crc kubenswrapper[4792]: I1202 18:40:39.752791 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 02 18:40:39 crc kubenswrapper[4792]: I1202 18:40:39.769419 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 02 18:40:39 crc kubenswrapper[4792]: I1202 18:40:39.812621 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 02 18:40:39 crc kubenswrapper[4792]: I1202 18:40:39.944064 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 02 18:40:39 crc kubenswrapper[4792]: I1202 18:40:39.966845 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 02 18:40:40 crc kubenswrapper[4792]: I1202 18:40:40.019316 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 02 18:40:40 crc kubenswrapper[4792]: I1202 18:40:40.118128 4792 reflector.go:368] Caches populated 
for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 02 18:40:40 crc kubenswrapper[4792]: I1202 18:40:40.141644 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 02 18:40:40 crc kubenswrapper[4792]: I1202 18:40:40.150626 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 02 18:40:40 crc kubenswrapper[4792]: I1202 18:40:40.162444 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 02 18:40:40 crc kubenswrapper[4792]: I1202 18:40:40.164904 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 02 18:40:40 crc kubenswrapper[4792]: I1202 18:40:40.224792 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 02 18:40:40 crc kubenswrapper[4792]: I1202 18:40:40.279016 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 02 18:40:40 crc kubenswrapper[4792]: I1202 18:40:40.292789 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 02 18:40:40 crc kubenswrapper[4792]: I1202 18:40:40.367344 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 02 18:40:40 crc kubenswrapper[4792]: I1202 18:40:40.370246 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 02 18:40:40 crc kubenswrapper[4792]: I1202 18:40:40.382264 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 02 18:40:40 crc kubenswrapper[4792]: I1202 18:40:40.454459 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 02 18:40:40 crc kubenswrapper[4792]: I1202 18:40:40.459466 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 02 18:40:40 crc kubenswrapper[4792]: I1202 18:40:40.525896 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 02 18:40:40 crc kubenswrapper[4792]: I1202 18:40:40.632721 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 02 18:40:40 crc kubenswrapper[4792]: I1202 18:40:40.665488 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 02 18:40:40 crc kubenswrapper[4792]: I1202 18:40:40.734763 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 02 18:40:40 crc kubenswrapper[4792]: I1202 18:40:40.796774 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 02 18:40:40 crc kubenswrapper[4792]: I1202 18:40:40.904147 4792 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 02 18:40:40 crc kubenswrapper[4792]: I1202 18:40:40.973964 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 02 18:40:40 crc kubenswrapper[4792]: I1202 18:40:40.991291 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 02 18:40:41 crc kubenswrapper[4792]: I1202 18:40:41.029475 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 02 18:40:41 crc kubenswrapper[4792]: I1202 18:40:41.144850 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 02 18:40:41 crc kubenswrapper[4792]: I1202 18:40:41.158699 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 02 18:40:41 crc kubenswrapper[4792]: I1202 18:40:41.271784 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 02 18:40:41 crc kubenswrapper[4792]: I1202 18:40:41.340466 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 02 18:40:41 crc kubenswrapper[4792]: I1202 18:40:41.371836 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 02 18:40:41 crc kubenswrapper[4792]: I1202 18:40:41.372201 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 02 18:40:41 crc kubenswrapper[4792]: I1202 18:40:41.462754 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 02 18:40:41 crc kubenswrapper[4792]: I1202 18:40:41.516058 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 02 18:40:41 crc kubenswrapper[4792]: I1202 18:40:41.564779 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 02 18:40:41 crc kubenswrapper[4792]: I1202 18:40:41.582405 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 02 18:40:41 crc kubenswrapper[4792]: I1202 18:40:41.645860 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 02 18:40:41 crc kubenswrapper[4792]: I1202 18:40:41.806737 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 02 18:40:41 crc kubenswrapper[4792]: I1202 18:40:41.886336 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 02 18:40:41 crc kubenswrapper[4792]: I1202 18:40:41.926919 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 02 18:40:41 crc kubenswrapper[4792]: I1202 18:40:41.930361 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 02 18:40:41 crc kubenswrapper[4792]: I1202 18:40:41.939391 4792 reflector.go:368] 
Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 02 18:40:41 crc kubenswrapper[4792]: I1202 18:40:41.952037 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 02 18:40:41 crc kubenswrapper[4792]: I1202 18:40:41.982217 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 02 18:40:42 crc kubenswrapper[4792]: I1202 18:40:42.089352 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 02 18:40:42 crc kubenswrapper[4792]: I1202 18:40:42.138602 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 02 18:40:42 crc kubenswrapper[4792]: I1202 18:40:42.160172 4792 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 02 18:40:42 crc kubenswrapper[4792]: I1202 18:40:42.166511 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-authentication/oauth-openshift-558db77b4-hgblz","openshift-marketplace/redhat-operators-x7tgb"] Dec 02 18:40:42 crc kubenswrapper[4792]: I1202 18:40:42.166624 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 02 18:40:42 crc kubenswrapper[4792]: I1202 18:40:42.174201 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 18:40:42 crc kubenswrapper[4792]: I1202 18:40:42.199190 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=22.199156072 podStartE2EDuration="22.199156072s" podCreationTimestamp="2025-12-02 18:40:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:40:42.197257956 +0000 UTC m=+272.970150294" watchObservedRunningTime="2025-12-02 18:40:42.199156072 +0000 UTC m=+272.972048430" Dec 02 18:40:42 crc kubenswrapper[4792]: I1202 18:40:42.201018 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 02 18:40:42 crc kubenswrapper[4792]: I1202 18:40:42.367502 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 02 18:40:42 crc kubenswrapper[4792]: I1202 18:40:42.465746 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 02 18:40:42 crc kubenswrapper[4792]: I1202 18:40:42.500941 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 02 18:40:42 crc kubenswrapper[4792]: I1202 18:40:42.515270 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 02 18:40:42 crc kubenswrapper[4792]: I1202 18:40:42.518444 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 02 18:40:42 crc kubenswrapper[4792]: I1202 18:40:42.531098 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 02 18:40:42 crc kubenswrapper[4792]: 
I1202 18:40:42.539968 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 02 18:40:42 crc kubenswrapper[4792]: I1202 18:40:42.587135 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 02 18:40:42 crc kubenswrapper[4792]: I1202 18:40:42.628774 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 02 18:40:42 crc kubenswrapper[4792]: I1202 18:40:42.686603 4792 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 02 18:40:42 crc kubenswrapper[4792]: I1202 18:40:42.686870 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://3fd61ba757b29b433e5dacfbc7ff03e926e40ed37746170ced3125062d016446" gracePeriod=5 Dec 02 18:40:42 crc kubenswrapper[4792]: I1202 18:40:42.689494 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 02 18:40:42 crc kubenswrapper[4792]: I1202 18:40:42.787635 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 02 18:40:42 crc kubenswrapper[4792]: I1202 18:40:42.870065 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 02 18:40:42 crc kubenswrapper[4792]: I1202 18:40:42.927786 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 02 18:40:43 crc kubenswrapper[4792]: I1202 18:40:43.080087 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 02 18:40:43 crc kubenswrapper[4792]: I1202 18:40:43.194109 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 02 18:40:43 crc kubenswrapper[4792]: I1202 18:40:43.301950 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 02 18:40:43 crc kubenswrapper[4792]: I1202 18:40:43.362779 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 02 18:40:43 crc kubenswrapper[4792]: I1202 18:40:43.385982 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 02 18:40:43 crc kubenswrapper[4792]: I1202 18:40:43.390269 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 02 18:40:43 crc kubenswrapper[4792]: I1202 18:40:43.445614 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 02 18:40:43 crc kubenswrapper[4792]: I1202 18:40:43.481146 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 02 18:40:43 crc kubenswrapper[4792]: I1202 18:40:43.550900 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04246b55-2809-4c64-abaf-9bed254d0e80" 
path="/var/lib/kubelet/pods/04246b55-2809-4c64-abaf-9bed254d0e80/volumes" Dec 02 18:40:43 crc kubenswrapper[4792]: I1202 18:40:43.552875 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8bad1c4d-9724-410d-8651-0a8c7c1d92b3" path="/var/lib/kubelet/pods/8bad1c4d-9724-410d-8651-0a8c7c1d92b3/volumes" Dec 02 18:40:43 crc kubenswrapper[4792]: I1202 18:40:43.574646 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 02 18:40:43 crc kubenswrapper[4792]: I1202 18:40:43.730174 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 02 18:40:43 crc kubenswrapper[4792]: I1202 18:40:43.781614 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 02 18:40:43 crc kubenswrapper[4792]: I1202 18:40:43.960316 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.212336 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.249478 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.295330 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng"] Dec 02 18:40:44 crc kubenswrapper[4792]: E1202 18:40:44.295988 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bad1c4d-9724-410d-8651-0a8c7c1d92b3" containerName="extract-content" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.296103 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bad1c4d-9724-410d-8651-0a8c7c1d92b3" containerName="extract-content" Dec 02 18:40:44 crc kubenswrapper[4792]: E1202 18:40:44.296194 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd6bd65f-0f46-42b5-b74a-d62fa8544aaa" containerName="installer" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.296271 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd6bd65f-0f46-42b5-b74a-d62fa8544aaa" containerName="installer" Dec 02 18:40:44 crc kubenswrapper[4792]: E1202 18:40:44.296355 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bad1c4d-9724-410d-8651-0a8c7c1d92b3" containerName="registry-server" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.296436 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bad1c4d-9724-410d-8651-0a8c7c1d92b3" containerName="registry-server" Dec 02 18:40:44 crc kubenswrapper[4792]: E1202 18:40:44.296549 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bad1c4d-9724-410d-8651-0a8c7c1d92b3" containerName="extract-utilities" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.296648 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bad1c4d-9724-410d-8651-0a8c7c1d92b3" containerName="extract-utilities" Dec 02 18:40:44 crc kubenswrapper[4792]: E1202 18:40:44.296736 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.296817 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" 
containerName="startup-monitor" Dec 02 18:40:44 crc kubenswrapper[4792]: E1202 18:40:44.296905 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04246b55-2809-4c64-abaf-9bed254d0e80" containerName="oauth-openshift" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.296997 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="04246b55-2809-4c64-abaf-9bed254d0e80" containerName="oauth-openshift" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.297241 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bad1c4d-9724-410d-8651-0a8c7c1d92b3" containerName="registry-server" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.297350 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="04246b55-2809-4c64-abaf-9bed254d0e80" containerName="oauth-openshift" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.297447 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.300399 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd6bd65f-0f46-42b5-b74a-d62fa8544aaa" containerName="installer" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.301142 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.305922 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.307672 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.307836 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.307708 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.309206 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.309368 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.309440 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.309454 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.309480 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.309723 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.309829 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 02 18:40:44 crc 
kubenswrapper[4792]: I1202 18:40:44.313360 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.317347 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.329733 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.334986 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.335328 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.342156 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng"] Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.364111 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.385467 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937-audit-policies\") pod \"oauth-openshift-66dbbf5c6f-jrcng\" (UID: \"8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937\") " pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.385541 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937-audit-dir\") pod \"oauth-openshift-66dbbf5c6f-jrcng\" (UID: \"8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937\") " pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.385569 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-66dbbf5c6f-jrcng\" (UID: \"8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937\") " pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.385591 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937-v4-0-config-user-template-error\") pod \"oauth-openshift-66dbbf5c6f-jrcng\" (UID: \"8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937\") " pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.385615 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2c97\" (UniqueName: \"kubernetes.io/projected/8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937-kube-api-access-f2c97\") pod \"oauth-openshift-66dbbf5c6f-jrcng\" (UID: \"8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937\") " pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 
18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.385777 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-66dbbf5c6f-jrcng\" (UID: \"8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937\") " pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.386049 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937-v4-0-config-system-serving-cert\") pod \"oauth-openshift-66dbbf5c6f-jrcng\" (UID: \"8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937\") " pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.386116 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937-v4-0-config-system-cliconfig\") pod \"oauth-openshift-66dbbf5c6f-jrcng\" (UID: \"8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937\") " pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.386168 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-66dbbf5c6f-jrcng\" (UID: \"8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937\") " pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.386300 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-66dbbf5c6f-jrcng\" (UID: \"8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937\") " pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.386461 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937-v4-0-config-system-service-ca\") pod \"oauth-openshift-66dbbf5c6f-jrcng\" (UID: \"8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937\") " pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.386557 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937-v4-0-config-system-session\") pod \"oauth-openshift-66dbbf5c6f-jrcng\" (UID: \"8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937\") " pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.386665 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937-v4-0-config-user-template-login\") 
pod \"oauth-openshift-66dbbf5c6f-jrcng\" (UID: \"8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937\") " pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.386724 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937-v4-0-config-system-router-certs\") pod \"oauth-openshift-66dbbf5c6f-jrcng\" (UID: \"8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937\") " pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.447316 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.488879 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937-v4-0-config-system-session\") pod \"oauth-openshift-66dbbf5c6f-jrcng\" (UID: \"8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937\") " pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.491627 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937-v4-0-config-user-template-login\") pod \"oauth-openshift-66dbbf5c6f-jrcng\" (UID: \"8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937\") " pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.491748 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937-v4-0-config-system-router-certs\") pod \"oauth-openshift-66dbbf5c6f-jrcng\" (UID: \"8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937\") " pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.491805 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937-audit-policies\") pod \"oauth-openshift-66dbbf5c6f-jrcng\" (UID: \"8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937\") " pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.491859 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937-audit-dir\") pod \"oauth-openshift-66dbbf5c6f-jrcng\" (UID: \"8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937\") " pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.491895 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-66dbbf5c6f-jrcng\" (UID: \"8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937\") " pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.491936 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937-v4-0-config-user-template-error\") pod \"oauth-openshift-66dbbf5c6f-jrcng\" (UID: \"8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937\") " pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.491978 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2c97\" (UniqueName: \"kubernetes.io/projected/8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937-kube-api-access-f2c97\") pod \"oauth-openshift-66dbbf5c6f-jrcng\" (UID: \"8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937\") " pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.492019 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-66dbbf5c6f-jrcng\" (UID: \"8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937\") " pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.492084 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937-v4-0-config-system-serving-cert\") pod \"oauth-openshift-66dbbf5c6f-jrcng\" (UID: \"8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937\") " pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.492082 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937-audit-dir\") pod \"oauth-openshift-66dbbf5c6f-jrcng\" (UID: \"8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937\") " pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.492119 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937-v4-0-config-system-cliconfig\") pod \"oauth-openshift-66dbbf5c6f-jrcng\" (UID: \"8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937\") " pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.492155 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-66dbbf5c6f-jrcng\" (UID: \"8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937\") " pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.492203 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-66dbbf5c6f-jrcng\" (UID: \"8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937\") " pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.492254 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937-v4-0-config-system-service-ca\") pod \"oauth-openshift-66dbbf5c6f-jrcng\" (UID: \"8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937\") " pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.492684 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937-audit-policies\") pod \"oauth-openshift-66dbbf5c6f-jrcng\" (UID: \"8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937\") " pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.493485 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937-v4-0-config-system-service-ca\") pod \"oauth-openshift-66dbbf5c6f-jrcng\" (UID: \"8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937\") " pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.494084 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937-v4-0-config-system-cliconfig\") pod \"oauth-openshift-66dbbf5c6f-jrcng\" (UID: \"8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937\") " pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.494453 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-66dbbf5c6f-jrcng\" (UID: \"8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937\") " pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.499254 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937-v4-0-config-system-router-certs\") pod \"oauth-openshift-66dbbf5c6f-jrcng\" (UID: \"8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937\") " pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.499734 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937-v4-0-config-user-template-error\") pod \"oauth-openshift-66dbbf5c6f-jrcng\" (UID: \"8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937\") " pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.500329 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937-v4-0-config-system-serving-cert\") pod \"oauth-openshift-66dbbf5c6f-jrcng\" (UID: \"8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937\") " pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.500609 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: 
\"kubernetes.io/secret/8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-66dbbf5c6f-jrcng\" (UID: \"8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937\") " pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.501212 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-66dbbf5c6f-jrcng\" (UID: \"8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937\") " pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.502190 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937-v4-0-config-user-template-login\") pod \"oauth-openshift-66dbbf5c6f-jrcng\" (UID: \"8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937\") " pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.502780 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937-v4-0-config-system-session\") pod \"oauth-openshift-66dbbf5c6f-jrcng\" (UID: \"8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937\") " pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.511297 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-66dbbf5c6f-jrcng\" (UID: \"8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937\") " pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.542513 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f2c97\" (UniqueName: \"kubernetes.io/projected/8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937-kube-api-access-f2c97\") pod \"oauth-openshift-66dbbf5c6f-jrcng\" (UID: \"8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937\") " pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.558673 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.631234 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.664852 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.764719 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.806402 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.818082 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.914576 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 02 18:40:44 crc kubenswrapper[4792]: I1202 18:40:44.940543 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng"] Dec 02 18:40:45 crc kubenswrapper[4792]: I1202 18:40:45.041808 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 02 18:40:45 crc kubenswrapper[4792]: I1202 18:40:45.132387 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 02 18:40:45 crc kubenswrapper[4792]: I1202 18:40:45.248195 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 02 18:40:45 crc kubenswrapper[4792]: I1202 18:40:45.319651 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 02 18:40:45 crc kubenswrapper[4792]: I1202 18:40:45.346545 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 02 18:40:45 crc kubenswrapper[4792]: I1202 18:40:45.613077 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 02 18:40:45 crc kubenswrapper[4792]: I1202 18:40:45.746124 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" event={"ID":"8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937","Type":"ContainerStarted","Data":"68c0f7e2a01f2299c12f779868d62c86152d469351102fba19a971061e987caa"} Dec 02 18:40:45 crc kubenswrapper[4792]: I1202 18:40:45.746202 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" event={"ID":"8bff80bb-f1a0-4c71-a7d0-0bbe0a55b937","Type":"ContainerStarted","Data":"1d92eb99384b4e23c14ba0399a6fac3915c9be1a2b1efc377a5a94f3408a5e01"} Dec 02 18:40:45 crc kubenswrapper[4792]: I1202 18:40:45.748277 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 18:40:45 crc kubenswrapper[4792]: I1202 18:40:45.793644 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 02 18:40:45 crc kubenswrapper[4792]: I1202 18:40:45.795063 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" podStartSLOduration=70.795038157 podStartE2EDuration="1m10.795038157s" podCreationTimestamp="2025-12-02 18:39:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:40:45.792127623 +0000 UTC m=+276.565019981" watchObservedRunningTime="2025-12-02 18:40:45.795038157 +0000 UTC m=+276.567930485" Dec 02 18:40:45 crc kubenswrapper[4792]: I1202 18:40:45.938943 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 02 18:40:46 crc kubenswrapper[4792]: I1202 18:40:46.104477 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-66dbbf5c6f-jrcng" Dec 02 18:40:46 crc kubenswrapper[4792]: I1202 18:40:46.374223 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 02 18:40:46 crc kubenswrapper[4792]: I1202 18:40:46.601096 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 02 18:40:46 crc kubenswrapper[4792]: I1202 18:40:46.664961 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 02 18:40:46 crc kubenswrapper[4792]: I1202 18:40:46.718732 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 02 18:40:47 crc kubenswrapper[4792]: I1202 18:40:47.209271 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 02 18:40:48 crc kubenswrapper[4792]: I1202 18:40:48.275783 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 02 18:40:48 crc kubenswrapper[4792]: I1202 18:40:48.275905 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 18:40:48 crc kubenswrapper[4792]: I1202 18:40:48.364877 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 02 18:40:48 crc kubenswrapper[4792]: I1202 18:40:48.364978 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 02 18:40:48 crc kubenswrapper[4792]: I1202 18:40:48.365023 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 02 18:40:48 crc kubenswrapper[4792]: I1202 18:40:48.365082 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 02 18:40:48 crc kubenswrapper[4792]: I1202 18:40:48.365071 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 18:40:48 crc kubenswrapper[4792]: I1202 18:40:48.365113 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 02 18:40:48 crc kubenswrapper[4792]: I1202 18:40:48.365147 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 18:40:48 crc kubenswrapper[4792]: I1202 18:40:48.365262 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 18:40:48 crc kubenswrapper[4792]: I1202 18:40:48.365309 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 18:40:48 crc kubenswrapper[4792]: I1202 18:40:48.365737 4792 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Dec 02 18:40:48 crc kubenswrapper[4792]: I1202 18:40:48.365772 4792 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Dec 02 18:40:48 crc kubenswrapper[4792]: I1202 18:40:48.365794 4792 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Dec 02 18:40:48 crc kubenswrapper[4792]: I1202 18:40:48.365835 4792 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 02 18:40:48 crc kubenswrapper[4792]: I1202 18:40:48.377004 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 18:40:48 crc kubenswrapper[4792]: I1202 18:40:48.466780 4792 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 02 18:40:48 crc kubenswrapper[4792]: I1202 18:40:48.777824 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 02 18:40:48 crc kubenswrapper[4792]: I1202 18:40:48.777880 4792 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="3fd61ba757b29b433e5dacfbc7ff03e926e40ed37746170ced3125062d016446" exitCode=137 Dec 02 18:40:48 crc kubenswrapper[4792]: I1202 18:40:48.777931 4792 scope.go:117] "RemoveContainer" containerID="3fd61ba757b29b433e5dacfbc7ff03e926e40ed37746170ced3125062d016446" Dec 02 18:40:48 crc kubenswrapper[4792]: I1202 18:40:48.778055 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 18:40:48 crc kubenswrapper[4792]: I1202 18:40:48.797151 4792 scope.go:117] "RemoveContainer" containerID="3fd61ba757b29b433e5dacfbc7ff03e926e40ed37746170ced3125062d016446" Dec 02 18:40:48 crc kubenswrapper[4792]: E1202 18:40:48.797804 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3fd61ba757b29b433e5dacfbc7ff03e926e40ed37746170ced3125062d016446\": container with ID starting with 3fd61ba757b29b433e5dacfbc7ff03e926e40ed37746170ced3125062d016446 not found: ID does not exist" containerID="3fd61ba757b29b433e5dacfbc7ff03e926e40ed37746170ced3125062d016446" Dec 02 18:40:48 crc kubenswrapper[4792]: I1202 18:40:48.797887 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3fd61ba757b29b433e5dacfbc7ff03e926e40ed37746170ced3125062d016446"} err="failed to get container status \"3fd61ba757b29b433e5dacfbc7ff03e926e40ed37746170ced3125062d016446\": rpc error: code = NotFound desc = could not find container \"3fd61ba757b29b433e5dacfbc7ff03e926e40ed37746170ced3125062d016446\": container with ID starting with 3fd61ba757b29b433e5dacfbc7ff03e926e40ed37746170ced3125062d016446 not found: ID does not exist" Dec 02 18:40:49 crc kubenswrapper[4792]: I1202 18:40:49.548455 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Dec 02 18:41:04 crc kubenswrapper[4792]: I1202 18:41:04.889819 4792 generic.go:334] "Generic (PLEG): container finished" podID="ab249412-8c3a-4e5f-b84a-374b19cc1dc9" containerID="a145eade2c7870a50a2ded2d47f9e134150e97e6cd1af473f97c07ccfeb00b9d" exitCode=0 Dec 02 18:41:04 crc kubenswrapper[4792]: I1202 18:41:04.889892 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-nzgzn" event={"ID":"ab249412-8c3a-4e5f-b84a-374b19cc1dc9","Type":"ContainerDied","Data":"a145eade2c7870a50a2ded2d47f9e134150e97e6cd1af473f97c07ccfeb00b9d"} Dec 02 18:41:04 crc kubenswrapper[4792]: I1202 18:41:04.890860 4792 scope.go:117] "RemoveContainer" containerID="a145eade2c7870a50a2ded2d47f9e134150e97e6cd1af473f97c07ccfeb00b9d" Dec 02 18:41:05 crc kubenswrapper[4792]: I1202 18:41:05.901849 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-nzgzn" event={"ID":"ab249412-8c3a-4e5f-b84a-374b19cc1dc9","Type":"ContainerStarted","Data":"824c076dc9f4885cd57f68c223cc4a78d0e3dac504b4a0be7cb0952f18f3ef45"} Dec 02 18:41:05 crc kubenswrapper[4792]: I1202 18:41:05.902590 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-nzgzn" Dec 02 18:41:05 crc kubenswrapper[4792]: I1202 18:41:05.906554 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-nzgzn" Dec 02 18:41:09 crc kubenswrapper[4792]: I1202 18:41:09.412596 4792 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Dec 02 18:41:42 crc kubenswrapper[4792]: I1202 18:41:42.812495 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-f87xj"] Dec 02 18:41:42 crc kubenswrapper[4792]: I1202 18:41:42.813426 4792 kuberuntime_container.go:808] "Killing 
container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-f87xj" podUID="39dbc3d9-e001-4ba3-af2d-e19a2016bbeb" containerName="controller-manager" containerID="cri-o://056613063c45fde111acb78cdcca0d1a8819bc3c5d4b121856a75320de201301" gracePeriod=30 Dec 02 18:41:42 crc kubenswrapper[4792]: I1202 18:41:42.938382 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-fjkdx"] Dec 02 18:41:42 crc kubenswrapper[4792]: I1202 18:41:42.939446 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fjkdx" podUID="8b019f11-935f-4956-aaf0-c6d2a5d66356" containerName="route-controller-manager" containerID="cri-o://e74eb100c940267ab18be4a9ff5e604cb9379b0b07c40dbc9728a2bde6eb4664" gracePeriod=30 Dec 02 18:41:43 crc kubenswrapper[4792]: I1202 18:41:43.126444 4792 generic.go:334] "Generic (PLEG): container finished" podID="8b019f11-935f-4956-aaf0-c6d2a5d66356" containerID="e74eb100c940267ab18be4a9ff5e604cb9379b0b07c40dbc9728a2bde6eb4664" exitCode=0 Dec 02 18:41:43 crc kubenswrapper[4792]: I1202 18:41:43.126799 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fjkdx" event={"ID":"8b019f11-935f-4956-aaf0-c6d2a5d66356","Type":"ContainerDied","Data":"e74eb100c940267ab18be4a9ff5e604cb9379b0b07c40dbc9728a2bde6eb4664"} Dec 02 18:41:43 crc kubenswrapper[4792]: I1202 18:41:43.129390 4792 generic.go:334] "Generic (PLEG): container finished" podID="39dbc3d9-e001-4ba3-af2d-e19a2016bbeb" containerID="056613063c45fde111acb78cdcca0d1a8819bc3c5d4b121856a75320de201301" exitCode=0 Dec 02 18:41:43 crc kubenswrapper[4792]: I1202 18:41:43.129417 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-f87xj" event={"ID":"39dbc3d9-e001-4ba3-af2d-e19a2016bbeb","Type":"ContainerDied","Data":"056613063c45fde111acb78cdcca0d1a8819bc3c5d4b121856a75320de201301"} Dec 02 18:41:43 crc kubenswrapper[4792]: I1202 18:41:43.129435 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-f87xj" event={"ID":"39dbc3d9-e001-4ba3-af2d-e19a2016bbeb","Type":"ContainerDied","Data":"0fcf1acecb11145e68c5e062d86e3049e5ebdb4881bb2dc4593d4753a42087f6"} Dec 02 18:41:43 crc kubenswrapper[4792]: I1202 18:41:43.129446 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0fcf1acecb11145e68c5e062d86e3049e5ebdb4881bb2dc4593d4753a42087f6" Dec 02 18:41:43 crc kubenswrapper[4792]: I1202 18:41:43.170221 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-f87xj" Dec 02 18:41:43 crc kubenswrapper[4792]: I1202 18:41:43.202595 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/39dbc3d9-e001-4ba3-af2d-e19a2016bbeb-serving-cert\") pod \"39dbc3d9-e001-4ba3-af2d-e19a2016bbeb\" (UID: \"39dbc3d9-e001-4ba3-af2d-e19a2016bbeb\") " Dec 02 18:41:43 crc kubenswrapper[4792]: I1202 18:41:43.202642 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39dbc3d9-e001-4ba3-af2d-e19a2016bbeb-config\") pod \"39dbc3d9-e001-4ba3-af2d-e19a2016bbeb\" (UID: \"39dbc3d9-e001-4ba3-af2d-e19a2016bbeb\") " Dec 02 18:41:43 crc kubenswrapper[4792]: I1202 18:41:43.202672 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/39dbc3d9-e001-4ba3-af2d-e19a2016bbeb-client-ca\") pod \"39dbc3d9-e001-4ba3-af2d-e19a2016bbeb\" (UID: \"39dbc3d9-e001-4ba3-af2d-e19a2016bbeb\") " Dec 02 18:41:43 crc kubenswrapper[4792]: I1202 18:41:43.202776 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/39dbc3d9-e001-4ba3-af2d-e19a2016bbeb-proxy-ca-bundles\") pod \"39dbc3d9-e001-4ba3-af2d-e19a2016bbeb\" (UID: \"39dbc3d9-e001-4ba3-af2d-e19a2016bbeb\") " Dec 02 18:41:43 crc kubenswrapper[4792]: I1202 18:41:43.202811 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kncrq\" (UniqueName: \"kubernetes.io/projected/39dbc3d9-e001-4ba3-af2d-e19a2016bbeb-kube-api-access-kncrq\") pod \"39dbc3d9-e001-4ba3-af2d-e19a2016bbeb\" (UID: \"39dbc3d9-e001-4ba3-af2d-e19a2016bbeb\") " Dec 02 18:41:43 crc kubenswrapper[4792]: I1202 18:41:43.203600 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/39dbc3d9-e001-4ba3-af2d-e19a2016bbeb-client-ca" (OuterVolumeSpecName: "client-ca") pod "39dbc3d9-e001-4ba3-af2d-e19a2016bbeb" (UID: "39dbc3d9-e001-4ba3-af2d-e19a2016bbeb"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:41:43 crc kubenswrapper[4792]: I1202 18:41:43.203717 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/39dbc3d9-e001-4ba3-af2d-e19a2016bbeb-config" (OuterVolumeSpecName: "config") pod "39dbc3d9-e001-4ba3-af2d-e19a2016bbeb" (UID: "39dbc3d9-e001-4ba3-af2d-e19a2016bbeb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:41:43 crc kubenswrapper[4792]: I1202 18:41:43.203755 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/39dbc3d9-e001-4ba3-af2d-e19a2016bbeb-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "39dbc3d9-e001-4ba3-af2d-e19a2016bbeb" (UID: "39dbc3d9-e001-4ba3-af2d-e19a2016bbeb"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:41:43 crc kubenswrapper[4792]: I1202 18:41:43.204138 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39dbc3d9-e001-4ba3-af2d-e19a2016bbeb-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:41:43 crc kubenswrapper[4792]: I1202 18:41:43.204164 4792 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/39dbc3d9-e001-4ba3-af2d-e19a2016bbeb-client-ca\") on node \"crc\" DevicePath \"\"" Dec 02 18:41:43 crc kubenswrapper[4792]: I1202 18:41:43.204176 4792 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/39dbc3d9-e001-4ba3-af2d-e19a2016bbeb-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 02 18:41:43 crc kubenswrapper[4792]: I1202 18:41:43.208764 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39dbc3d9-e001-4ba3-af2d-e19a2016bbeb-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "39dbc3d9-e001-4ba3-af2d-e19a2016bbeb" (UID: "39dbc3d9-e001-4ba3-af2d-e19a2016bbeb"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:41:43 crc kubenswrapper[4792]: I1202 18:41:43.209949 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39dbc3d9-e001-4ba3-af2d-e19a2016bbeb-kube-api-access-kncrq" (OuterVolumeSpecName: "kube-api-access-kncrq") pod "39dbc3d9-e001-4ba3-af2d-e19a2016bbeb" (UID: "39dbc3d9-e001-4ba3-af2d-e19a2016bbeb"). InnerVolumeSpecName "kube-api-access-kncrq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:41:43 crc kubenswrapper[4792]: I1202 18:41:43.232332 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fjkdx" Dec 02 18:41:43 crc kubenswrapper[4792]: I1202 18:41:43.304532 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8b019f11-935f-4956-aaf0-c6d2a5d66356-client-ca\") pod \"8b019f11-935f-4956-aaf0-c6d2a5d66356\" (UID: \"8b019f11-935f-4956-aaf0-c6d2a5d66356\") " Dec 02 18:41:43 crc kubenswrapper[4792]: I1202 18:41:43.304581 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8b019f11-935f-4956-aaf0-c6d2a5d66356-serving-cert\") pod \"8b019f11-935f-4956-aaf0-c6d2a5d66356\" (UID: \"8b019f11-935f-4956-aaf0-c6d2a5d66356\") " Dec 02 18:41:43 crc kubenswrapper[4792]: I1202 18:41:43.304615 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqhdw\" (UniqueName: \"kubernetes.io/projected/8b019f11-935f-4956-aaf0-c6d2a5d66356-kube-api-access-fqhdw\") pod \"8b019f11-935f-4956-aaf0-c6d2a5d66356\" (UID: \"8b019f11-935f-4956-aaf0-c6d2a5d66356\") " Dec 02 18:41:43 crc kubenswrapper[4792]: I1202 18:41:43.304661 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b019f11-935f-4956-aaf0-c6d2a5d66356-config\") pod \"8b019f11-935f-4956-aaf0-c6d2a5d66356\" (UID: \"8b019f11-935f-4956-aaf0-c6d2a5d66356\") " Dec 02 18:41:43 crc kubenswrapper[4792]: I1202 18:41:43.304838 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kncrq\" (UniqueName: \"kubernetes.io/projected/39dbc3d9-e001-4ba3-af2d-e19a2016bbeb-kube-api-access-kncrq\") on node \"crc\" DevicePath \"\"" Dec 02 18:41:43 crc kubenswrapper[4792]: I1202 18:41:43.304849 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/39dbc3d9-e001-4ba3-af2d-e19a2016bbeb-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 18:41:43 crc kubenswrapper[4792]: I1202 18:41:43.305115 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b019f11-935f-4956-aaf0-c6d2a5d66356-client-ca" (OuterVolumeSpecName: "client-ca") pod "8b019f11-935f-4956-aaf0-c6d2a5d66356" (UID: "8b019f11-935f-4956-aaf0-c6d2a5d66356"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:41:43 crc kubenswrapper[4792]: I1202 18:41:43.305331 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b019f11-935f-4956-aaf0-c6d2a5d66356-config" (OuterVolumeSpecName: "config") pod "8b019f11-935f-4956-aaf0-c6d2a5d66356" (UID: "8b019f11-935f-4956-aaf0-c6d2a5d66356"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:41:43 crc kubenswrapper[4792]: I1202 18:41:43.307461 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b019f11-935f-4956-aaf0-c6d2a5d66356-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8b019f11-935f-4956-aaf0-c6d2a5d66356" (UID: "8b019f11-935f-4956-aaf0-c6d2a5d66356"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:41:43 crc kubenswrapper[4792]: I1202 18:41:43.307950 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b019f11-935f-4956-aaf0-c6d2a5d66356-kube-api-access-fqhdw" (OuterVolumeSpecName: "kube-api-access-fqhdw") pod "8b019f11-935f-4956-aaf0-c6d2a5d66356" (UID: "8b019f11-935f-4956-aaf0-c6d2a5d66356"). InnerVolumeSpecName "kube-api-access-fqhdw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:41:43 crc kubenswrapper[4792]: I1202 18:41:43.406394 4792 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8b019f11-935f-4956-aaf0-c6d2a5d66356-client-ca\") on node \"crc\" DevicePath \"\"" Dec 02 18:41:43 crc kubenswrapper[4792]: I1202 18:41:43.406421 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8b019f11-935f-4956-aaf0-c6d2a5d66356-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 18:41:43 crc kubenswrapper[4792]: I1202 18:41:43.406432 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqhdw\" (UniqueName: \"kubernetes.io/projected/8b019f11-935f-4956-aaf0-c6d2a5d66356-kube-api-access-fqhdw\") on node \"crc\" DevicePath \"\"" Dec 02 18:41:43 crc kubenswrapper[4792]: I1202 18:41:43.406444 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b019f11-935f-4956-aaf0-c6d2a5d66356-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:41:43 crc kubenswrapper[4792]: E1202 18:41:43.594063 4792 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod39dbc3d9_e001_4ba3_af2d_e19a2016bbeb.slice\": RecentStats: unable to find data in memory cache]" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.136449 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-f87xj" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.136667 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fjkdx" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.136651 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-fjkdx" event={"ID":"8b019f11-935f-4956-aaf0-c6d2a5d66356","Type":"ContainerDied","Data":"289ca8ebb1ced68e950b2876acebd4c063e6783e7b601445c1865a20e65c84b4"} Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.137829 4792 scope.go:117] "RemoveContainer" containerID="e74eb100c940267ab18be4a9ff5e604cb9379b0b07c40dbc9728a2bde6eb4664" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.156511 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-fjkdx"] Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.159650 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-fjkdx"] Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.182990 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-f87xj"] Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.187441 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-f87xj"] Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.252920 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-bfdc67bf-n967h"] Dec 02 18:41:44 crc kubenswrapper[4792]: E1202 18:41:44.255803 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39dbc3d9-e001-4ba3-af2d-e19a2016bbeb" containerName="controller-manager" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.255843 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="39dbc3d9-e001-4ba3-af2d-e19a2016bbeb" containerName="controller-manager" Dec 02 18:41:44 crc kubenswrapper[4792]: E1202 18:41:44.255911 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b019f11-935f-4956-aaf0-c6d2a5d66356" containerName="route-controller-manager" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.255921 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b019f11-935f-4956-aaf0-c6d2a5d66356" containerName="route-controller-manager" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.256316 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b019f11-935f-4956-aaf0-c6d2a5d66356" containerName="route-controller-manager" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.256334 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="39dbc3d9-e001-4ba3-af2d-e19a2016bbeb" containerName="controller-manager" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.256932 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-bfdc67bf-n967h" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.259873 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.260082 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.260274 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-697c475d79-5fppt"] Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.261320 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-697c475d79-5fppt" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.262353 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.263123 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.263273 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.263806 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.264926 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.265145 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.265455 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.265661 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.266083 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.266247 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-bfdc67bf-n967h"] Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.267970 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-697c475d79-5fppt"] Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.270072 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.295215 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.319661 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/a59f9e43-913a-4d79-8a6f-94190f04504d-config\") pod \"route-controller-manager-697c475d79-5fppt\" (UID: \"a59f9e43-913a-4d79-8a6f-94190f04504d\") " pod="openshift-route-controller-manager/route-controller-manager-697c475d79-5fppt" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.319725 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a48d6b47-407c-4833-be14-74065f54d711-proxy-ca-bundles\") pod \"controller-manager-bfdc67bf-n967h\" (UID: \"a48d6b47-407c-4833-be14-74065f54d711\") " pod="openshift-controller-manager/controller-manager-bfdc67bf-n967h" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.319808 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmg48\" (UniqueName: \"kubernetes.io/projected/a48d6b47-407c-4833-be14-74065f54d711-kube-api-access-mmg48\") pod \"controller-manager-bfdc67bf-n967h\" (UID: \"a48d6b47-407c-4833-be14-74065f54d711\") " pod="openshift-controller-manager/controller-manager-bfdc67bf-n967h" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.319851 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a59f9e43-913a-4d79-8a6f-94190f04504d-client-ca\") pod \"route-controller-manager-697c475d79-5fppt\" (UID: \"a59f9e43-913a-4d79-8a6f-94190f04504d\") " pod="openshift-route-controller-manager/route-controller-manager-697c475d79-5fppt" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.319879 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a59f9e43-913a-4d79-8a6f-94190f04504d-serving-cert\") pod \"route-controller-manager-697c475d79-5fppt\" (UID: \"a59f9e43-913a-4d79-8a6f-94190f04504d\") " pod="openshift-route-controller-manager/route-controller-manager-697c475d79-5fppt" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.319916 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a48d6b47-407c-4833-be14-74065f54d711-client-ca\") pod \"controller-manager-bfdc67bf-n967h\" (UID: \"a48d6b47-407c-4833-be14-74065f54d711\") " pod="openshift-controller-manager/controller-manager-bfdc67bf-n967h" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.320070 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c22xs\" (UniqueName: \"kubernetes.io/projected/a59f9e43-913a-4d79-8a6f-94190f04504d-kube-api-access-c22xs\") pod \"route-controller-manager-697c475d79-5fppt\" (UID: \"a59f9e43-913a-4d79-8a6f-94190f04504d\") " pod="openshift-route-controller-manager/route-controller-manager-697c475d79-5fppt" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.320135 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a48d6b47-407c-4833-be14-74065f54d711-config\") pod \"controller-manager-bfdc67bf-n967h\" (UID: \"a48d6b47-407c-4833-be14-74065f54d711\") " pod="openshift-controller-manager/controller-manager-bfdc67bf-n967h" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.320226 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" 
(UniqueName: \"kubernetes.io/secret/a48d6b47-407c-4833-be14-74065f54d711-serving-cert\") pod \"controller-manager-bfdc67bf-n967h\" (UID: \"a48d6b47-407c-4833-be14-74065f54d711\") " pod="openshift-controller-manager/controller-manager-bfdc67bf-n967h" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.421352 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a59f9e43-913a-4d79-8a6f-94190f04504d-client-ca\") pod \"route-controller-manager-697c475d79-5fppt\" (UID: \"a59f9e43-913a-4d79-8a6f-94190f04504d\") " pod="openshift-route-controller-manager/route-controller-manager-697c475d79-5fppt" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.421670 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a59f9e43-913a-4d79-8a6f-94190f04504d-serving-cert\") pod \"route-controller-manager-697c475d79-5fppt\" (UID: \"a59f9e43-913a-4d79-8a6f-94190f04504d\") " pod="openshift-route-controller-manager/route-controller-manager-697c475d79-5fppt" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.421774 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a48d6b47-407c-4833-be14-74065f54d711-client-ca\") pod \"controller-manager-bfdc67bf-n967h\" (UID: \"a48d6b47-407c-4833-be14-74065f54d711\") " pod="openshift-controller-manager/controller-manager-bfdc67bf-n967h" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.421852 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c22xs\" (UniqueName: \"kubernetes.io/projected/a59f9e43-913a-4d79-8a6f-94190f04504d-kube-api-access-c22xs\") pod \"route-controller-manager-697c475d79-5fppt\" (UID: \"a59f9e43-913a-4d79-8a6f-94190f04504d\") " pod="openshift-route-controller-manager/route-controller-manager-697c475d79-5fppt" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.421928 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a48d6b47-407c-4833-be14-74065f54d711-config\") pod \"controller-manager-bfdc67bf-n967h\" (UID: \"a48d6b47-407c-4833-be14-74065f54d711\") " pod="openshift-controller-manager/controller-manager-bfdc67bf-n967h" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.422006 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a48d6b47-407c-4833-be14-74065f54d711-serving-cert\") pod \"controller-manager-bfdc67bf-n967h\" (UID: \"a48d6b47-407c-4833-be14-74065f54d711\") " pod="openshift-controller-manager/controller-manager-bfdc67bf-n967h" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.422089 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a59f9e43-913a-4d79-8a6f-94190f04504d-config\") pod \"route-controller-manager-697c475d79-5fppt\" (UID: \"a59f9e43-913a-4d79-8a6f-94190f04504d\") " pod="openshift-route-controller-manager/route-controller-manager-697c475d79-5fppt" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.422169 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a48d6b47-407c-4833-be14-74065f54d711-proxy-ca-bundles\") pod \"controller-manager-bfdc67bf-n967h\" (UID: 
\"a48d6b47-407c-4833-be14-74065f54d711\") " pod="openshift-controller-manager/controller-manager-bfdc67bf-n967h" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.422251 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmg48\" (UniqueName: \"kubernetes.io/projected/a48d6b47-407c-4833-be14-74065f54d711-kube-api-access-mmg48\") pod \"controller-manager-bfdc67bf-n967h\" (UID: \"a48d6b47-407c-4833-be14-74065f54d711\") " pod="openshift-controller-manager/controller-manager-bfdc67bf-n967h" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.422536 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a59f9e43-913a-4d79-8a6f-94190f04504d-client-ca\") pod \"route-controller-manager-697c475d79-5fppt\" (UID: \"a59f9e43-913a-4d79-8a6f-94190f04504d\") " pod="openshift-route-controller-manager/route-controller-manager-697c475d79-5fppt" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.422877 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a48d6b47-407c-4833-be14-74065f54d711-client-ca\") pod \"controller-manager-bfdc67bf-n967h\" (UID: \"a48d6b47-407c-4833-be14-74065f54d711\") " pod="openshift-controller-manager/controller-manager-bfdc67bf-n967h" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.424290 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a48d6b47-407c-4833-be14-74065f54d711-proxy-ca-bundles\") pod \"controller-manager-bfdc67bf-n967h\" (UID: \"a48d6b47-407c-4833-be14-74065f54d711\") " pod="openshift-controller-manager/controller-manager-bfdc67bf-n967h" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.424424 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a48d6b47-407c-4833-be14-74065f54d711-config\") pod \"controller-manager-bfdc67bf-n967h\" (UID: \"a48d6b47-407c-4833-be14-74065f54d711\") " pod="openshift-controller-manager/controller-manager-bfdc67bf-n967h" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.424717 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a59f9e43-913a-4d79-8a6f-94190f04504d-config\") pod \"route-controller-manager-697c475d79-5fppt\" (UID: \"a59f9e43-913a-4d79-8a6f-94190f04504d\") " pod="openshift-route-controller-manager/route-controller-manager-697c475d79-5fppt" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.434468 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a48d6b47-407c-4833-be14-74065f54d711-serving-cert\") pod \"controller-manager-bfdc67bf-n967h\" (UID: \"a48d6b47-407c-4833-be14-74065f54d711\") " pod="openshift-controller-manager/controller-manager-bfdc67bf-n967h" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.436015 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a59f9e43-913a-4d79-8a6f-94190f04504d-serving-cert\") pod \"route-controller-manager-697c475d79-5fppt\" (UID: \"a59f9e43-913a-4d79-8a6f-94190f04504d\") " pod="openshift-route-controller-manager/route-controller-manager-697c475d79-5fppt" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.438693 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-c22xs\" (UniqueName: \"kubernetes.io/projected/a59f9e43-913a-4d79-8a6f-94190f04504d-kube-api-access-c22xs\") pod \"route-controller-manager-697c475d79-5fppt\" (UID: \"a59f9e43-913a-4d79-8a6f-94190f04504d\") " pod="openshift-route-controller-manager/route-controller-manager-697c475d79-5fppt" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.452107 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mmg48\" (UniqueName: \"kubernetes.io/projected/a48d6b47-407c-4833-be14-74065f54d711-kube-api-access-mmg48\") pod \"controller-manager-bfdc67bf-n967h\" (UID: \"a48d6b47-407c-4833-be14-74065f54d711\") " pod="openshift-controller-manager/controller-manager-bfdc67bf-n967h" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.619123 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-bfdc67bf-n967h" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.623656 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-697c475d79-5fppt" Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.826407 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-bfdc67bf-n967h"] Dec 02 18:41:44 crc kubenswrapper[4792]: W1202 18:41:44.833901 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda48d6b47_407c_4833_be14_74065f54d711.slice/crio-a08d5c32cacb016a9e371277314adec38f9956725156ae91a427f99368278f47 WatchSource:0}: Error finding container a08d5c32cacb016a9e371277314adec38f9956725156ae91a427f99368278f47: Status 404 returned error can't find the container with id a08d5c32cacb016a9e371277314adec38f9956725156ae91a427f99368278f47 Dec 02 18:41:44 crc kubenswrapper[4792]: I1202 18:41:44.888374 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-697c475d79-5fppt"] Dec 02 18:41:44 crc kubenswrapper[4792]: W1202 18:41:44.899375 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda59f9e43_913a_4d79_8a6f_94190f04504d.slice/crio-74ef497fd9a8317ffc683174bd224348a85ecb024f64e6f5db5c50b00676db67 WatchSource:0}: Error finding container 74ef497fd9a8317ffc683174bd224348a85ecb024f64e6f5db5c50b00676db67: Status 404 returned error can't find the container with id 74ef497fd9a8317ffc683174bd224348a85ecb024f64e6f5db5c50b00676db67 Dec 02 18:41:45 crc kubenswrapper[4792]: I1202 18:41:45.143769 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-bfdc67bf-n967h" event={"ID":"a48d6b47-407c-4833-be14-74065f54d711","Type":"ContainerStarted","Data":"5008d4acc9980b4bb10be3cb30a4c9a6ef3693af1ec9e592b311ea53768fde0b"} Dec 02 18:41:45 crc kubenswrapper[4792]: I1202 18:41:45.143832 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-bfdc67bf-n967h" event={"ID":"a48d6b47-407c-4833-be14-74065f54d711","Type":"ContainerStarted","Data":"a08d5c32cacb016a9e371277314adec38f9956725156ae91a427f99368278f47"} Dec 02 18:41:45 crc kubenswrapper[4792]: I1202 18:41:45.143851 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-bfdc67bf-n967h" Dec 02 18:41:45 crc kubenswrapper[4792]: 
I1202 18:41:45.145686 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-697c475d79-5fppt" event={"ID":"a59f9e43-913a-4d79-8a6f-94190f04504d","Type":"ContainerStarted","Data":"26b9ed3e67279a24c857c656b09db602da56d201af48bb196ee7da38b077c556"} Dec 02 18:41:45 crc kubenswrapper[4792]: I1202 18:41:45.145737 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-697c475d79-5fppt" event={"ID":"a59f9e43-913a-4d79-8a6f-94190f04504d","Type":"ContainerStarted","Data":"74ef497fd9a8317ffc683174bd224348a85ecb024f64e6f5db5c50b00676db67"} Dec 02 18:41:45 crc kubenswrapper[4792]: I1202 18:41:45.145900 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-697c475d79-5fppt" Dec 02 18:41:45 crc kubenswrapper[4792]: I1202 18:41:45.149948 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-bfdc67bf-n967h" Dec 02 18:41:45 crc kubenswrapper[4792]: I1202 18:41:45.169171 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-bfdc67bf-n967h" podStartSLOduration=3.169135833 podStartE2EDuration="3.169135833s" podCreationTimestamp="2025-12-02 18:41:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:41:45.163189849 +0000 UTC m=+335.936082177" watchObservedRunningTime="2025-12-02 18:41:45.169135833 +0000 UTC m=+335.942028201" Dec 02 18:41:45 crc kubenswrapper[4792]: I1202 18:41:45.181070 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-697c475d79-5fppt" podStartSLOduration=3.181046401 podStartE2EDuration="3.181046401s" podCreationTimestamp="2025-12-02 18:41:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:41:45.179713202 +0000 UTC m=+335.952605550" watchObservedRunningTime="2025-12-02 18:41:45.181046401 +0000 UTC m=+335.953938729" Dec 02 18:41:45 crc kubenswrapper[4792]: I1202 18:41:45.303748 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-697c475d79-5fppt" Dec 02 18:41:45 crc kubenswrapper[4792]: I1202 18:41:45.547411 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39dbc3d9-e001-4ba3-af2d-e19a2016bbeb" path="/var/lib/kubelet/pods/39dbc3d9-e001-4ba3-af2d-e19a2016bbeb/volumes" Dec 02 18:41:45 crc kubenswrapper[4792]: I1202 18:41:45.548596 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b019f11-935f-4956-aaf0-c6d2a5d66356" path="/var/lib/kubelet/pods/8b019f11-935f-4956-aaf0-c6d2a5d66356/volumes" Dec 02 18:41:45 crc kubenswrapper[4792]: I1202 18:41:45.719757 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-bfdc67bf-n967h"] Dec 02 18:41:45 crc kubenswrapper[4792]: I1202 18:41:45.731352 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-697c475d79-5fppt"] Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.155570 4792 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-controller-manager/controller-manager-bfdc67bf-n967h" podUID="a48d6b47-407c-4833-be14-74065f54d711" containerName="controller-manager" containerID="cri-o://5008d4acc9980b4bb10be3cb30a4c9a6ef3693af1ec9e592b311ea53768fde0b" gracePeriod=30 Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.155846 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-697c475d79-5fppt" podUID="a59f9e43-913a-4d79-8a6f-94190f04504d" containerName="route-controller-manager" containerID="cri-o://26b9ed3e67279a24c857c656b09db602da56d201af48bb196ee7da38b077c556" gracePeriod=30 Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.573864 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-697c475d79-5fppt" Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.578216 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-bfdc67bf-n967h" Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.616816 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-685f6d45b6-tfchl"] Dec 02 18:41:47 crc kubenswrapper[4792]: E1202 18:41:47.617118 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a48d6b47-407c-4833-be14-74065f54d711" containerName="controller-manager" Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.617131 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="a48d6b47-407c-4833-be14-74065f54d711" containerName="controller-manager" Dec 02 18:41:47 crc kubenswrapper[4792]: E1202 18:41:47.617148 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a59f9e43-913a-4d79-8a6f-94190f04504d" containerName="route-controller-manager" Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.617154 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="a59f9e43-913a-4d79-8a6f-94190f04504d" containerName="route-controller-manager" Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.617242 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="a59f9e43-913a-4d79-8a6f-94190f04504d" containerName="route-controller-manager" Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.617255 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="a48d6b47-407c-4833-be14-74065f54d711" containerName="controller-manager" Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.617682 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-685f6d45b6-tfchl" Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.621722 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-685f6d45b6-tfchl"] Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.662099 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a48d6b47-407c-4833-be14-74065f54d711-serving-cert\") pod \"a48d6b47-407c-4833-be14-74065f54d711\" (UID: \"a48d6b47-407c-4833-be14-74065f54d711\") " Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.662153 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a48d6b47-407c-4833-be14-74065f54d711-config\") pod \"a48d6b47-407c-4833-be14-74065f54d711\" (UID: \"a48d6b47-407c-4833-be14-74065f54d711\") " Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.662193 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a48d6b47-407c-4833-be14-74065f54d711-proxy-ca-bundles\") pod \"a48d6b47-407c-4833-be14-74065f54d711\" (UID: \"a48d6b47-407c-4833-be14-74065f54d711\") " Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.662221 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mmg48\" (UniqueName: \"kubernetes.io/projected/a48d6b47-407c-4833-be14-74065f54d711-kube-api-access-mmg48\") pod \"a48d6b47-407c-4833-be14-74065f54d711\" (UID: \"a48d6b47-407c-4833-be14-74065f54d711\") " Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.662262 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a59f9e43-913a-4d79-8a6f-94190f04504d-config\") pod \"a59f9e43-913a-4d79-8a6f-94190f04504d\" (UID: \"a59f9e43-913a-4d79-8a6f-94190f04504d\") " Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.662295 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a59f9e43-913a-4d79-8a6f-94190f04504d-serving-cert\") pod \"a59f9e43-913a-4d79-8a6f-94190f04504d\" (UID: \"a59f9e43-913a-4d79-8a6f-94190f04504d\") " Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.662321 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a59f9e43-913a-4d79-8a6f-94190f04504d-client-ca\") pod \"a59f9e43-913a-4d79-8a6f-94190f04504d\" (UID: \"a59f9e43-913a-4d79-8a6f-94190f04504d\") " Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.662342 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a48d6b47-407c-4833-be14-74065f54d711-client-ca\") pod \"a48d6b47-407c-4833-be14-74065f54d711\" (UID: \"a48d6b47-407c-4833-be14-74065f54d711\") " Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.662359 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c22xs\" (UniqueName: \"kubernetes.io/projected/a59f9e43-913a-4d79-8a6f-94190f04504d-kube-api-access-c22xs\") pod \"a59f9e43-913a-4d79-8a6f-94190f04504d\" (UID: \"a59f9e43-913a-4d79-8a6f-94190f04504d\") " Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.662581 4792 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c3e172e-c1d8-428f-8524-f40dd50aa8c2-config\") pod \"route-controller-manager-685f6d45b6-tfchl\" (UID: \"0c3e172e-c1d8-428f-8524-f40dd50aa8c2\") " pod="openshift-route-controller-manager/route-controller-manager-685f6d45b6-tfchl" Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.662606 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0c3e172e-c1d8-428f-8524-f40dd50aa8c2-serving-cert\") pod \"route-controller-manager-685f6d45b6-tfchl\" (UID: \"0c3e172e-c1d8-428f-8524-f40dd50aa8c2\") " pod="openshift-route-controller-manager/route-controller-manager-685f6d45b6-tfchl" Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.662649 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0c3e172e-c1d8-428f-8524-f40dd50aa8c2-client-ca\") pod \"route-controller-manager-685f6d45b6-tfchl\" (UID: \"0c3e172e-c1d8-428f-8524-f40dd50aa8c2\") " pod="openshift-route-controller-manager/route-controller-manager-685f6d45b6-tfchl" Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.662677 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d7fpq\" (UniqueName: \"kubernetes.io/projected/0c3e172e-c1d8-428f-8524-f40dd50aa8c2-kube-api-access-d7fpq\") pod \"route-controller-manager-685f6d45b6-tfchl\" (UID: \"0c3e172e-c1d8-428f-8524-f40dd50aa8c2\") " pod="openshift-route-controller-manager/route-controller-manager-685f6d45b6-tfchl" Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.663606 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a48d6b47-407c-4833-be14-74065f54d711-client-ca" (OuterVolumeSpecName: "client-ca") pod "a48d6b47-407c-4833-be14-74065f54d711" (UID: "a48d6b47-407c-4833-be14-74065f54d711"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.663739 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a48d6b47-407c-4833-be14-74065f54d711-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "a48d6b47-407c-4833-be14-74065f54d711" (UID: "a48d6b47-407c-4833-be14-74065f54d711"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.663927 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a59f9e43-913a-4d79-8a6f-94190f04504d-client-ca" (OuterVolumeSpecName: "client-ca") pod "a59f9e43-913a-4d79-8a6f-94190f04504d" (UID: "a59f9e43-913a-4d79-8a6f-94190f04504d"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.663989 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a59f9e43-913a-4d79-8a6f-94190f04504d-config" (OuterVolumeSpecName: "config") pod "a59f9e43-913a-4d79-8a6f-94190f04504d" (UID: "a59f9e43-913a-4d79-8a6f-94190f04504d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.664127 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a48d6b47-407c-4833-be14-74065f54d711-config" (OuterVolumeSpecName: "config") pod "a48d6b47-407c-4833-be14-74065f54d711" (UID: "a48d6b47-407c-4833-be14-74065f54d711"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.668514 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a48d6b47-407c-4833-be14-74065f54d711-kube-api-access-mmg48" (OuterVolumeSpecName: "kube-api-access-mmg48") pod "a48d6b47-407c-4833-be14-74065f54d711" (UID: "a48d6b47-407c-4833-be14-74065f54d711"). InnerVolumeSpecName "kube-api-access-mmg48". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.668842 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a59f9e43-913a-4d79-8a6f-94190f04504d-kube-api-access-c22xs" (OuterVolumeSpecName: "kube-api-access-c22xs") pod "a59f9e43-913a-4d79-8a6f-94190f04504d" (UID: "a59f9e43-913a-4d79-8a6f-94190f04504d"). InnerVolumeSpecName "kube-api-access-c22xs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.668858 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a48d6b47-407c-4833-be14-74065f54d711-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "a48d6b47-407c-4833-be14-74065f54d711" (UID: "a48d6b47-407c-4833-be14-74065f54d711"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.670242 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a59f9e43-913a-4d79-8a6f-94190f04504d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "a59f9e43-913a-4d79-8a6f-94190f04504d" (UID: "a59f9e43-913a-4d79-8a6f-94190f04504d"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.764100 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d7fpq\" (UniqueName: \"kubernetes.io/projected/0c3e172e-c1d8-428f-8524-f40dd50aa8c2-kube-api-access-d7fpq\") pod \"route-controller-manager-685f6d45b6-tfchl\" (UID: \"0c3e172e-c1d8-428f-8524-f40dd50aa8c2\") " pod="openshift-route-controller-manager/route-controller-manager-685f6d45b6-tfchl" Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.764283 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c3e172e-c1d8-428f-8524-f40dd50aa8c2-config\") pod \"route-controller-manager-685f6d45b6-tfchl\" (UID: \"0c3e172e-c1d8-428f-8524-f40dd50aa8c2\") " pod="openshift-route-controller-manager/route-controller-manager-685f6d45b6-tfchl" Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.764354 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0c3e172e-c1d8-428f-8524-f40dd50aa8c2-serving-cert\") pod \"route-controller-manager-685f6d45b6-tfchl\" (UID: \"0c3e172e-c1d8-428f-8524-f40dd50aa8c2\") " pod="openshift-route-controller-manager/route-controller-manager-685f6d45b6-tfchl" Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.764474 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0c3e172e-c1d8-428f-8524-f40dd50aa8c2-client-ca\") pod \"route-controller-manager-685f6d45b6-tfchl\" (UID: \"0c3e172e-c1d8-428f-8524-f40dd50aa8c2\") " pod="openshift-route-controller-manager/route-controller-manager-685f6d45b6-tfchl" Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.764632 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a59f9e43-913a-4d79-8a6f-94190f04504d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.764674 4792 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a59f9e43-913a-4d79-8a6f-94190f04504d-client-ca\") on node \"crc\" DevicePath \"\"" Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.764699 4792 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a48d6b47-407c-4833-be14-74065f54d711-client-ca\") on node \"crc\" DevicePath \"\"" Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.764722 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c22xs\" (UniqueName: \"kubernetes.io/projected/a59f9e43-913a-4d79-8a6f-94190f04504d-kube-api-access-c22xs\") on node \"crc\" DevicePath \"\"" Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.764749 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a48d6b47-407c-4833-be14-74065f54d711-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.764774 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a48d6b47-407c-4833-be14-74065f54d711-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.764797 4792 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/a48d6b47-407c-4833-be14-74065f54d711-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.764824 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mmg48\" (UniqueName: \"kubernetes.io/projected/a48d6b47-407c-4833-be14-74065f54d711-kube-api-access-mmg48\") on node \"crc\" DevicePath \"\"" Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.764849 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a59f9e43-913a-4d79-8a6f-94190f04504d-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.765617 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0c3e172e-c1d8-428f-8524-f40dd50aa8c2-client-ca\") pod \"route-controller-manager-685f6d45b6-tfchl\" (UID: \"0c3e172e-c1d8-428f-8524-f40dd50aa8c2\") " pod="openshift-route-controller-manager/route-controller-manager-685f6d45b6-tfchl" Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.765758 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c3e172e-c1d8-428f-8524-f40dd50aa8c2-config\") pod \"route-controller-manager-685f6d45b6-tfchl\" (UID: \"0c3e172e-c1d8-428f-8524-f40dd50aa8c2\") " pod="openshift-route-controller-manager/route-controller-manager-685f6d45b6-tfchl" Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.770915 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0c3e172e-c1d8-428f-8524-f40dd50aa8c2-serving-cert\") pod \"route-controller-manager-685f6d45b6-tfchl\" (UID: \"0c3e172e-c1d8-428f-8524-f40dd50aa8c2\") " pod="openshift-route-controller-manager/route-controller-manager-685f6d45b6-tfchl" Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.786444 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d7fpq\" (UniqueName: \"kubernetes.io/projected/0c3e172e-c1d8-428f-8524-f40dd50aa8c2-kube-api-access-d7fpq\") pod \"route-controller-manager-685f6d45b6-tfchl\" (UID: \"0c3e172e-c1d8-428f-8524-f40dd50aa8c2\") " pod="openshift-route-controller-manager/route-controller-manager-685f6d45b6-tfchl" Dec 02 18:41:47 crc kubenswrapper[4792]: I1202 18:41:47.933567 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-685f6d45b6-tfchl" Dec 02 18:41:48 crc kubenswrapper[4792]: I1202 18:41:48.167183 4792 generic.go:334] "Generic (PLEG): container finished" podID="a59f9e43-913a-4d79-8a6f-94190f04504d" containerID="26b9ed3e67279a24c857c656b09db602da56d201af48bb196ee7da38b077c556" exitCode=0 Dec 02 18:41:48 crc kubenswrapper[4792]: I1202 18:41:48.167248 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-697c475d79-5fppt" event={"ID":"a59f9e43-913a-4d79-8a6f-94190f04504d","Type":"ContainerDied","Data":"26b9ed3e67279a24c857c656b09db602da56d201af48bb196ee7da38b077c556"} Dec 02 18:41:48 crc kubenswrapper[4792]: I1202 18:41:48.167281 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-697c475d79-5fppt" event={"ID":"a59f9e43-913a-4d79-8a6f-94190f04504d","Type":"ContainerDied","Data":"74ef497fd9a8317ffc683174bd224348a85ecb024f64e6f5db5c50b00676db67"} Dec 02 18:41:48 crc kubenswrapper[4792]: I1202 18:41:48.167304 4792 scope.go:117] "RemoveContainer" containerID="26b9ed3e67279a24c857c656b09db602da56d201af48bb196ee7da38b077c556" Dec 02 18:41:48 crc kubenswrapper[4792]: I1202 18:41:48.167411 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-697c475d79-5fppt" Dec 02 18:41:48 crc kubenswrapper[4792]: I1202 18:41:48.171839 4792 generic.go:334] "Generic (PLEG): container finished" podID="a48d6b47-407c-4833-be14-74065f54d711" containerID="5008d4acc9980b4bb10be3cb30a4c9a6ef3693af1ec9e592b311ea53768fde0b" exitCode=0 Dec 02 18:41:48 crc kubenswrapper[4792]: I1202 18:41:48.171887 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-bfdc67bf-n967h" event={"ID":"a48d6b47-407c-4833-be14-74065f54d711","Type":"ContainerDied","Data":"5008d4acc9980b4bb10be3cb30a4c9a6ef3693af1ec9e592b311ea53768fde0b"} Dec 02 18:41:48 crc kubenswrapper[4792]: I1202 18:41:48.171917 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-bfdc67bf-n967h" event={"ID":"a48d6b47-407c-4833-be14-74065f54d711","Type":"ContainerDied","Data":"a08d5c32cacb016a9e371277314adec38f9956725156ae91a427f99368278f47"} Dec 02 18:41:48 crc kubenswrapper[4792]: I1202 18:41:48.171975 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-bfdc67bf-n967h" Dec 02 18:41:48 crc kubenswrapper[4792]: I1202 18:41:48.185752 4792 scope.go:117] "RemoveContainer" containerID="26b9ed3e67279a24c857c656b09db602da56d201af48bb196ee7da38b077c556" Dec 02 18:41:48 crc kubenswrapper[4792]: E1202 18:41:48.188997 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"26b9ed3e67279a24c857c656b09db602da56d201af48bb196ee7da38b077c556\": container with ID starting with 26b9ed3e67279a24c857c656b09db602da56d201af48bb196ee7da38b077c556 not found: ID does not exist" containerID="26b9ed3e67279a24c857c656b09db602da56d201af48bb196ee7da38b077c556" Dec 02 18:41:48 crc kubenswrapper[4792]: I1202 18:41:48.189043 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"26b9ed3e67279a24c857c656b09db602da56d201af48bb196ee7da38b077c556"} err="failed to get container status \"26b9ed3e67279a24c857c656b09db602da56d201af48bb196ee7da38b077c556\": rpc error: code = NotFound desc = could not find container \"26b9ed3e67279a24c857c656b09db602da56d201af48bb196ee7da38b077c556\": container with ID starting with 26b9ed3e67279a24c857c656b09db602da56d201af48bb196ee7da38b077c556 not found: ID does not exist" Dec 02 18:41:48 crc kubenswrapper[4792]: I1202 18:41:48.189070 4792 scope.go:117] "RemoveContainer" containerID="5008d4acc9980b4bb10be3cb30a4c9a6ef3693af1ec9e592b311ea53768fde0b" Dec 02 18:41:48 crc kubenswrapper[4792]: I1202 18:41:48.210153 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-697c475d79-5fppt"] Dec 02 18:41:48 crc kubenswrapper[4792]: I1202 18:41:48.210744 4792 scope.go:117] "RemoveContainer" containerID="5008d4acc9980b4bb10be3cb30a4c9a6ef3693af1ec9e592b311ea53768fde0b" Dec 02 18:41:48 crc kubenswrapper[4792]: E1202 18:41:48.211456 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5008d4acc9980b4bb10be3cb30a4c9a6ef3693af1ec9e592b311ea53768fde0b\": container with ID starting with 5008d4acc9980b4bb10be3cb30a4c9a6ef3693af1ec9e592b311ea53768fde0b not found: ID does not exist" containerID="5008d4acc9980b4bb10be3cb30a4c9a6ef3693af1ec9e592b311ea53768fde0b" Dec 02 18:41:48 crc kubenswrapper[4792]: I1202 18:41:48.211556 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5008d4acc9980b4bb10be3cb30a4c9a6ef3693af1ec9e592b311ea53768fde0b"} err="failed to get container status \"5008d4acc9980b4bb10be3cb30a4c9a6ef3693af1ec9e592b311ea53768fde0b\": rpc error: code = NotFound desc = could not find container \"5008d4acc9980b4bb10be3cb30a4c9a6ef3693af1ec9e592b311ea53768fde0b\": container with ID starting with 5008d4acc9980b4bb10be3cb30a4c9a6ef3693af1ec9e592b311ea53768fde0b not found: ID does not exist" Dec 02 18:41:48 crc kubenswrapper[4792]: I1202 18:41:48.220974 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-697c475d79-5fppt"] Dec 02 18:41:48 crc kubenswrapper[4792]: I1202 18:41:48.225617 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-bfdc67bf-n967h"] Dec 02 18:41:48 crc kubenswrapper[4792]: I1202 18:41:48.230109 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-bfdc67bf-n967h"] Dec 02 18:41:48 crc 
kubenswrapper[4792]: I1202 18:41:48.377179 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-685f6d45b6-tfchl"] Dec 02 18:41:49 crc kubenswrapper[4792]: I1202 18:41:49.179260 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-685f6d45b6-tfchl" event={"ID":"0c3e172e-c1d8-428f-8524-f40dd50aa8c2","Type":"ContainerStarted","Data":"380c063508e650c8292ecb313e1699d2d69c337b7e5c7472c2d5df1322a63ccd"} Dec 02 18:41:49 crc kubenswrapper[4792]: I1202 18:41:49.179327 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-685f6d45b6-tfchl" event={"ID":"0c3e172e-c1d8-428f-8524-f40dd50aa8c2","Type":"ContainerStarted","Data":"1fc2db9edf6da67dd53479d15b818582832bbf20c10abbf7bdacd680b8f38f59"} Dec 02 18:41:49 crc kubenswrapper[4792]: I1202 18:41:49.179718 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-685f6d45b6-tfchl" Dec 02 18:41:49 crc kubenswrapper[4792]: I1202 18:41:49.186337 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-685f6d45b6-tfchl" Dec 02 18:41:49 crc kubenswrapper[4792]: I1202 18:41:49.194782 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-685f6d45b6-tfchl" podStartSLOduration=4.194765884 podStartE2EDuration="4.194765884s" podCreationTimestamp="2025-12-02 18:41:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:41:49.194339722 +0000 UTC m=+339.967232060" watchObservedRunningTime="2025-12-02 18:41:49.194765884 +0000 UTC m=+339.967658212" Dec 02 18:41:49 crc kubenswrapper[4792]: I1202 18:41:49.546564 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a48d6b47-407c-4833-be14-74065f54d711" path="/var/lib/kubelet/pods/a48d6b47-407c-4833-be14-74065f54d711/volumes" Dec 02 18:41:49 crc kubenswrapper[4792]: I1202 18:41:49.547236 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a59f9e43-913a-4d79-8a6f-94190f04504d" path="/var/lib/kubelet/pods/a59f9e43-913a-4d79-8a6f-94190f04504d/volumes" Dec 02 18:41:50 crc kubenswrapper[4792]: I1202 18:41:50.261114 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7bcdf8b5f6-vlzft"] Dec 02 18:41:50 crc kubenswrapper[4792]: I1202 18:41:50.263081 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7bcdf8b5f6-vlzft" Dec 02 18:41:50 crc kubenswrapper[4792]: I1202 18:41:50.268942 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 02 18:41:50 crc kubenswrapper[4792]: I1202 18:41:50.269405 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 02 18:41:50 crc kubenswrapper[4792]: I1202 18:41:50.272393 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 02 18:41:50 crc kubenswrapper[4792]: I1202 18:41:50.272848 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 02 18:41:50 crc kubenswrapper[4792]: I1202 18:41:50.272910 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 02 18:41:50 crc kubenswrapper[4792]: I1202 18:41:50.273052 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 02 18:41:50 crc kubenswrapper[4792]: I1202 18:41:50.280890 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 02 18:41:50 crc kubenswrapper[4792]: I1202 18:41:50.282595 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7bcdf8b5f6-vlzft"] Dec 02 18:41:50 crc kubenswrapper[4792]: I1202 18:41:50.307610 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89-serving-cert\") pod \"controller-manager-7bcdf8b5f6-vlzft\" (UID: \"87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89\") " pod="openshift-controller-manager/controller-manager-7bcdf8b5f6-vlzft" Dec 02 18:41:50 crc kubenswrapper[4792]: I1202 18:41:50.307712 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89-client-ca\") pod \"controller-manager-7bcdf8b5f6-vlzft\" (UID: \"87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89\") " pod="openshift-controller-manager/controller-manager-7bcdf8b5f6-vlzft" Dec 02 18:41:50 crc kubenswrapper[4792]: I1202 18:41:50.307763 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cfhm4\" (UniqueName: \"kubernetes.io/projected/87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89-kube-api-access-cfhm4\") pod \"controller-manager-7bcdf8b5f6-vlzft\" (UID: \"87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89\") " pod="openshift-controller-manager/controller-manager-7bcdf8b5f6-vlzft" Dec 02 18:41:50 crc kubenswrapper[4792]: I1202 18:41:50.307848 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89-config\") pod \"controller-manager-7bcdf8b5f6-vlzft\" (UID: \"87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89\") " pod="openshift-controller-manager/controller-manager-7bcdf8b5f6-vlzft" Dec 02 18:41:50 crc kubenswrapper[4792]: I1202 18:41:50.307895 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89-proxy-ca-bundles\") pod \"controller-manager-7bcdf8b5f6-vlzft\" (UID: \"87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89\") " pod="openshift-controller-manager/controller-manager-7bcdf8b5f6-vlzft" Dec 02 18:41:50 crc kubenswrapper[4792]: I1202 18:41:50.410162 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89-config\") pod \"controller-manager-7bcdf8b5f6-vlzft\" (UID: \"87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89\") " pod="openshift-controller-manager/controller-manager-7bcdf8b5f6-vlzft" Dec 02 18:41:50 crc kubenswrapper[4792]: I1202 18:41:50.410264 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89-proxy-ca-bundles\") pod \"controller-manager-7bcdf8b5f6-vlzft\" (UID: \"87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89\") " pod="openshift-controller-manager/controller-manager-7bcdf8b5f6-vlzft" Dec 02 18:41:50 crc kubenswrapper[4792]: I1202 18:41:50.410401 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89-serving-cert\") pod \"controller-manager-7bcdf8b5f6-vlzft\" (UID: \"87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89\") " pod="openshift-controller-manager/controller-manager-7bcdf8b5f6-vlzft" Dec 02 18:41:50 crc kubenswrapper[4792]: I1202 18:41:50.410461 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89-client-ca\") pod \"controller-manager-7bcdf8b5f6-vlzft\" (UID: \"87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89\") " pod="openshift-controller-manager/controller-manager-7bcdf8b5f6-vlzft" Dec 02 18:41:50 crc kubenswrapper[4792]: I1202 18:41:50.410515 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cfhm4\" (UniqueName: \"kubernetes.io/projected/87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89-kube-api-access-cfhm4\") pod \"controller-manager-7bcdf8b5f6-vlzft\" (UID: \"87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89\") " pod="openshift-controller-manager/controller-manager-7bcdf8b5f6-vlzft" Dec 02 18:41:50 crc kubenswrapper[4792]: I1202 18:41:50.412891 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89-client-ca\") pod \"controller-manager-7bcdf8b5f6-vlzft\" (UID: \"87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89\") " pod="openshift-controller-manager/controller-manager-7bcdf8b5f6-vlzft" Dec 02 18:41:50 crc kubenswrapper[4792]: I1202 18:41:50.413212 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89-config\") pod \"controller-manager-7bcdf8b5f6-vlzft\" (UID: \"87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89\") " pod="openshift-controller-manager/controller-manager-7bcdf8b5f6-vlzft" Dec 02 18:41:50 crc kubenswrapper[4792]: I1202 18:41:50.415232 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89-proxy-ca-bundles\") pod \"controller-manager-7bcdf8b5f6-vlzft\" (UID: \"87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89\") " 
pod="openshift-controller-manager/controller-manager-7bcdf8b5f6-vlzft" Dec 02 18:41:50 crc kubenswrapper[4792]: I1202 18:41:50.419812 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89-serving-cert\") pod \"controller-manager-7bcdf8b5f6-vlzft\" (UID: \"87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89\") " pod="openshift-controller-manager/controller-manager-7bcdf8b5f6-vlzft" Dec 02 18:41:50 crc kubenswrapper[4792]: I1202 18:41:50.443745 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cfhm4\" (UniqueName: \"kubernetes.io/projected/87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89-kube-api-access-cfhm4\") pod \"controller-manager-7bcdf8b5f6-vlzft\" (UID: \"87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89\") " pod="openshift-controller-manager/controller-manager-7bcdf8b5f6-vlzft" Dec 02 18:41:50 crc kubenswrapper[4792]: I1202 18:41:50.626313 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7bcdf8b5f6-vlzft" Dec 02 18:41:50 crc kubenswrapper[4792]: I1202 18:41:50.830704 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7bcdf8b5f6-vlzft"] Dec 02 18:41:50 crc kubenswrapper[4792]: W1202 18:41:50.834830 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod87d70b6e_efd5_40b0_be2b_fe6ccbdc3a89.slice/crio-d35af30ab960a37ec3aa56a771f4d8619668c16cbd8a11a07586bb9f16f14355 WatchSource:0}: Error finding container d35af30ab960a37ec3aa56a771f4d8619668c16cbd8a11a07586bb9f16f14355: Status 404 returned error can't find the container with id d35af30ab960a37ec3aa56a771f4d8619668c16cbd8a11a07586bb9f16f14355 Dec 02 18:41:51 crc kubenswrapper[4792]: I1202 18:41:51.198147 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7bcdf8b5f6-vlzft" event={"ID":"87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89","Type":"ContainerStarted","Data":"d35af30ab960a37ec3aa56a771f4d8619668c16cbd8a11a07586bb9f16f14355"} Dec 02 18:41:52 crc kubenswrapper[4792]: I1202 18:41:52.206637 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7bcdf8b5f6-vlzft" event={"ID":"87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89","Type":"ContainerStarted","Data":"ed3aa317af8d34313a314d517393c8968541aeabc3f4bf812f084c5411081231"} Dec 02 18:41:52 crc kubenswrapper[4792]: I1202 18:41:52.207632 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7bcdf8b5f6-vlzft" Dec 02 18:41:52 crc kubenswrapper[4792]: I1202 18:41:52.216214 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7bcdf8b5f6-vlzft" Dec 02 18:41:52 crc kubenswrapper[4792]: I1202 18:41:52.229503 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7bcdf8b5f6-vlzft" podStartSLOduration=7.229476438 podStartE2EDuration="7.229476438s" podCreationTimestamp="2025-12-02 18:41:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:41:52.224360358 +0000 UTC m=+342.997252686" watchObservedRunningTime="2025-12-02 18:41:52.229476438 +0000 UTC m=+343.002368766" Dec 02 18:42:02 crc 
kubenswrapper[4792]: I1202 18:42:02.846836 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-685f6d45b6-tfchl"] Dec 02 18:42:02 crc kubenswrapper[4792]: I1202 18:42:02.847875 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-685f6d45b6-tfchl" podUID="0c3e172e-c1d8-428f-8524-f40dd50aa8c2" containerName="route-controller-manager" containerID="cri-o://380c063508e650c8292ecb313e1699d2d69c337b7e5c7472c2d5df1322a63ccd" gracePeriod=30 Dec 02 18:42:03 crc kubenswrapper[4792]: I1202 18:42:03.280574 4792 generic.go:334] "Generic (PLEG): container finished" podID="0c3e172e-c1d8-428f-8524-f40dd50aa8c2" containerID="380c063508e650c8292ecb313e1699d2d69c337b7e5c7472c2d5df1322a63ccd" exitCode=0 Dec 02 18:42:03 crc kubenswrapper[4792]: I1202 18:42:03.280666 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-685f6d45b6-tfchl" event={"ID":"0c3e172e-c1d8-428f-8524-f40dd50aa8c2","Type":"ContainerDied","Data":"380c063508e650c8292ecb313e1699d2d69c337b7e5c7472c2d5df1322a63ccd"} Dec 02 18:42:03 crc kubenswrapper[4792]: I1202 18:42:03.438046 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-685f6d45b6-tfchl" Dec 02 18:42:03 crc kubenswrapper[4792]: I1202 18:42:03.500162 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c3e172e-c1d8-428f-8524-f40dd50aa8c2-config\") pod \"0c3e172e-c1d8-428f-8524-f40dd50aa8c2\" (UID: \"0c3e172e-c1d8-428f-8524-f40dd50aa8c2\") " Dec 02 18:42:03 crc kubenswrapper[4792]: I1202 18:42:03.501641 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0c3e172e-c1d8-428f-8524-f40dd50aa8c2-config" (OuterVolumeSpecName: "config") pod "0c3e172e-c1d8-428f-8524-f40dd50aa8c2" (UID: "0c3e172e-c1d8-428f-8524-f40dd50aa8c2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:42:03 crc kubenswrapper[4792]: I1202 18:42:03.601572 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0c3e172e-c1d8-428f-8524-f40dd50aa8c2-serving-cert\") pod \"0c3e172e-c1d8-428f-8524-f40dd50aa8c2\" (UID: \"0c3e172e-c1d8-428f-8524-f40dd50aa8c2\") " Dec 02 18:42:03 crc kubenswrapper[4792]: I1202 18:42:03.601775 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0c3e172e-c1d8-428f-8524-f40dd50aa8c2-client-ca\") pod \"0c3e172e-c1d8-428f-8524-f40dd50aa8c2\" (UID: \"0c3e172e-c1d8-428f-8524-f40dd50aa8c2\") " Dec 02 18:42:03 crc kubenswrapper[4792]: I1202 18:42:03.602586 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0c3e172e-c1d8-428f-8524-f40dd50aa8c2-client-ca" (OuterVolumeSpecName: "client-ca") pod "0c3e172e-c1d8-428f-8524-f40dd50aa8c2" (UID: "0c3e172e-c1d8-428f-8524-f40dd50aa8c2"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:42:03 crc kubenswrapper[4792]: I1202 18:42:03.602900 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d7fpq\" (UniqueName: \"kubernetes.io/projected/0c3e172e-c1d8-428f-8524-f40dd50aa8c2-kube-api-access-d7fpq\") pod \"0c3e172e-c1d8-428f-8524-f40dd50aa8c2\" (UID: \"0c3e172e-c1d8-428f-8524-f40dd50aa8c2\") " Dec 02 18:42:03 crc kubenswrapper[4792]: I1202 18:42:03.603406 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c3e172e-c1d8-428f-8524-f40dd50aa8c2-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:42:03 crc kubenswrapper[4792]: I1202 18:42:03.603586 4792 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0c3e172e-c1d8-428f-8524-f40dd50aa8c2-client-ca\") on node \"crc\" DevicePath \"\"" Dec 02 18:42:03 crc kubenswrapper[4792]: I1202 18:42:03.610873 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c3e172e-c1d8-428f-8524-f40dd50aa8c2-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0c3e172e-c1d8-428f-8524-f40dd50aa8c2" (UID: "0c3e172e-c1d8-428f-8524-f40dd50aa8c2"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:42:03 crc kubenswrapper[4792]: I1202 18:42:03.613797 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0c3e172e-c1d8-428f-8524-f40dd50aa8c2-kube-api-access-d7fpq" (OuterVolumeSpecName: "kube-api-access-d7fpq") pod "0c3e172e-c1d8-428f-8524-f40dd50aa8c2" (UID: "0c3e172e-c1d8-428f-8524-f40dd50aa8c2"). InnerVolumeSpecName "kube-api-access-d7fpq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:42:03 crc kubenswrapper[4792]: I1202 18:42:03.704603 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0c3e172e-c1d8-428f-8524-f40dd50aa8c2-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 18:42:03 crc kubenswrapper[4792]: I1202 18:42:03.704662 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d7fpq\" (UniqueName: \"kubernetes.io/projected/0c3e172e-c1d8-428f-8524-f40dd50aa8c2-kube-api-access-d7fpq\") on node \"crc\" DevicePath \"\"" Dec 02 18:42:04 crc kubenswrapper[4792]: I1202 18:42:04.273462 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-654c864c59-p48ng"] Dec 02 18:42:04 crc kubenswrapper[4792]: E1202 18:42:04.273889 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c3e172e-c1d8-428f-8524-f40dd50aa8c2" containerName="route-controller-manager" Dec 02 18:42:04 crc kubenswrapper[4792]: I1202 18:42:04.273911 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c3e172e-c1d8-428f-8524-f40dd50aa8c2" containerName="route-controller-manager" Dec 02 18:42:04 crc kubenswrapper[4792]: I1202 18:42:04.274494 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c3e172e-c1d8-428f-8524-f40dd50aa8c2" containerName="route-controller-manager" Dec 02 18:42:04 crc kubenswrapper[4792]: I1202 18:42:04.275417 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-654c864c59-p48ng" Dec 02 18:42:04 crc kubenswrapper[4792]: I1202 18:42:04.291921 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-654c864c59-p48ng"] Dec 02 18:42:04 crc kubenswrapper[4792]: I1202 18:42:04.313871 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-685f6d45b6-tfchl" event={"ID":"0c3e172e-c1d8-428f-8524-f40dd50aa8c2","Type":"ContainerDied","Data":"1fc2db9edf6da67dd53479d15b818582832bbf20c10abbf7bdacd680b8f38f59"} Dec 02 18:42:04 crc kubenswrapper[4792]: I1202 18:42:04.313974 4792 scope.go:117] "RemoveContainer" containerID="380c063508e650c8292ecb313e1699d2d69c337b7e5c7472c2d5df1322a63ccd" Dec 02 18:42:04 crc kubenswrapper[4792]: I1202 18:42:04.313992 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-685f6d45b6-tfchl" Dec 02 18:42:04 crc kubenswrapper[4792]: I1202 18:42:04.362243 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-685f6d45b6-tfchl"] Dec 02 18:42:04 crc kubenswrapper[4792]: I1202 18:42:04.369744 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-685f6d45b6-tfchl"] Dec 02 18:42:04 crc kubenswrapper[4792]: I1202 18:42:04.428005 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/24c629d7-f9e1-4f57-91e7-b4fb6e69a227-client-ca\") pod \"route-controller-manager-654c864c59-p48ng\" (UID: \"24c629d7-f9e1-4f57-91e7-b4fb6e69a227\") " pod="openshift-route-controller-manager/route-controller-manager-654c864c59-p48ng" Dec 02 18:42:04 crc kubenswrapper[4792]: I1202 18:42:04.428078 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-msnvw\" (UniqueName: \"kubernetes.io/projected/24c629d7-f9e1-4f57-91e7-b4fb6e69a227-kube-api-access-msnvw\") pod \"route-controller-manager-654c864c59-p48ng\" (UID: \"24c629d7-f9e1-4f57-91e7-b4fb6e69a227\") " pod="openshift-route-controller-manager/route-controller-manager-654c864c59-p48ng" Dec 02 18:42:04 crc kubenswrapper[4792]: I1202 18:42:04.428131 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/24c629d7-f9e1-4f57-91e7-b4fb6e69a227-config\") pod \"route-controller-manager-654c864c59-p48ng\" (UID: \"24c629d7-f9e1-4f57-91e7-b4fb6e69a227\") " pod="openshift-route-controller-manager/route-controller-manager-654c864c59-p48ng" Dec 02 18:42:04 crc kubenswrapper[4792]: I1202 18:42:04.428193 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/24c629d7-f9e1-4f57-91e7-b4fb6e69a227-serving-cert\") pod \"route-controller-manager-654c864c59-p48ng\" (UID: \"24c629d7-f9e1-4f57-91e7-b4fb6e69a227\") " pod="openshift-route-controller-manager/route-controller-manager-654c864c59-p48ng" Dec 02 18:42:04 crc kubenswrapper[4792]: I1202 18:42:04.529712 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/24c629d7-f9e1-4f57-91e7-b4fb6e69a227-client-ca\") pod 
\"route-controller-manager-654c864c59-p48ng\" (UID: \"24c629d7-f9e1-4f57-91e7-b4fb6e69a227\") " pod="openshift-route-controller-manager/route-controller-manager-654c864c59-p48ng" Dec 02 18:42:04 crc kubenswrapper[4792]: I1202 18:42:04.529805 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-msnvw\" (UniqueName: \"kubernetes.io/projected/24c629d7-f9e1-4f57-91e7-b4fb6e69a227-kube-api-access-msnvw\") pod \"route-controller-manager-654c864c59-p48ng\" (UID: \"24c629d7-f9e1-4f57-91e7-b4fb6e69a227\") " pod="openshift-route-controller-manager/route-controller-manager-654c864c59-p48ng" Dec 02 18:42:04 crc kubenswrapper[4792]: I1202 18:42:04.529848 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/24c629d7-f9e1-4f57-91e7-b4fb6e69a227-config\") pod \"route-controller-manager-654c864c59-p48ng\" (UID: \"24c629d7-f9e1-4f57-91e7-b4fb6e69a227\") " pod="openshift-route-controller-manager/route-controller-manager-654c864c59-p48ng" Dec 02 18:42:04 crc kubenswrapper[4792]: I1202 18:42:04.529895 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/24c629d7-f9e1-4f57-91e7-b4fb6e69a227-serving-cert\") pod \"route-controller-manager-654c864c59-p48ng\" (UID: \"24c629d7-f9e1-4f57-91e7-b4fb6e69a227\") " pod="openshift-route-controller-manager/route-controller-manager-654c864c59-p48ng" Dec 02 18:42:04 crc kubenswrapper[4792]: I1202 18:42:04.531324 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/24c629d7-f9e1-4f57-91e7-b4fb6e69a227-config\") pod \"route-controller-manager-654c864c59-p48ng\" (UID: \"24c629d7-f9e1-4f57-91e7-b4fb6e69a227\") " pod="openshift-route-controller-manager/route-controller-manager-654c864c59-p48ng" Dec 02 18:42:04 crc kubenswrapper[4792]: I1202 18:42:04.531580 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/24c629d7-f9e1-4f57-91e7-b4fb6e69a227-client-ca\") pod \"route-controller-manager-654c864c59-p48ng\" (UID: \"24c629d7-f9e1-4f57-91e7-b4fb6e69a227\") " pod="openshift-route-controller-manager/route-controller-manager-654c864c59-p48ng" Dec 02 18:42:04 crc kubenswrapper[4792]: I1202 18:42:04.539442 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/24c629d7-f9e1-4f57-91e7-b4fb6e69a227-serving-cert\") pod \"route-controller-manager-654c864c59-p48ng\" (UID: \"24c629d7-f9e1-4f57-91e7-b4fb6e69a227\") " pod="openshift-route-controller-manager/route-controller-manager-654c864c59-p48ng" Dec 02 18:42:04 crc kubenswrapper[4792]: I1202 18:42:04.558778 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-msnvw\" (UniqueName: \"kubernetes.io/projected/24c629d7-f9e1-4f57-91e7-b4fb6e69a227-kube-api-access-msnvw\") pod \"route-controller-manager-654c864c59-p48ng\" (UID: \"24c629d7-f9e1-4f57-91e7-b4fb6e69a227\") " pod="openshift-route-controller-manager/route-controller-manager-654c864c59-p48ng" Dec 02 18:42:04 crc kubenswrapper[4792]: I1202 18:42:04.632579 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-654c864c59-p48ng" Dec 02 18:42:05 crc kubenswrapper[4792]: I1202 18:42:05.096797 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-654c864c59-p48ng"] Dec 02 18:42:05 crc kubenswrapper[4792]: I1202 18:42:05.325945 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-654c864c59-p48ng" event={"ID":"24c629d7-f9e1-4f57-91e7-b4fb6e69a227","Type":"ContainerStarted","Data":"01e5e2322933e93cfc92f759c281a29762d9683028c9461de551df67461505d9"} Dec 02 18:42:05 crc kubenswrapper[4792]: I1202 18:42:05.326003 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-654c864c59-p48ng" event={"ID":"24c629d7-f9e1-4f57-91e7-b4fb6e69a227","Type":"ContainerStarted","Data":"83bec594edf6dbb93e5165a3b07b678c46d1d38eec4ab8d836bffc46f123db08"} Dec 02 18:42:05 crc kubenswrapper[4792]: I1202 18:42:05.326028 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-654c864c59-p48ng" Dec 02 18:42:05 crc kubenswrapper[4792]: I1202 18:42:05.357513 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-654c864c59-p48ng" podStartSLOduration=3.357488849 podStartE2EDuration="3.357488849s" podCreationTimestamp="2025-12-02 18:42:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:42:05.352510814 +0000 UTC m=+356.125403182" watchObservedRunningTime="2025-12-02 18:42:05.357488849 +0000 UTC m=+356.130381197" Dec 02 18:42:05 crc kubenswrapper[4792]: I1202 18:42:05.548781 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0c3e172e-c1d8-428f-8524-f40dd50aa8c2" path="/var/lib/kubelet/pods/0c3e172e-c1d8-428f-8524-f40dd50aa8c2/volumes" Dec 02 18:42:05 crc kubenswrapper[4792]: I1202 18:42:05.726418 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-654c864c59-p48ng" Dec 02 18:42:08 crc kubenswrapper[4792]: I1202 18:42:08.081446 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 18:42:08 crc kubenswrapper[4792]: I1202 18:42:08.081589 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 18:42:09 crc kubenswrapper[4792]: I1202 18:42:09.275169 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-zszzb"] Dec 02 18:42:09 crc kubenswrapper[4792]: I1202 18:42:09.276233 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-zszzb" Dec 02 18:42:09 crc kubenswrapper[4792]: I1202 18:42:09.292218 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-zszzb"] Dec 02 18:42:09 crc kubenswrapper[4792]: I1202 18:42:09.312365 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/7f230c0c-67ec-4d94-804d-aed590ed041d-registry-tls\") pod \"image-registry-66df7c8f76-zszzb\" (UID: \"7f230c0c-67ec-4d94-804d-aed590ed041d\") " pod="openshift-image-registry/image-registry-66df7c8f76-zszzb" Dec 02 18:42:09 crc kubenswrapper[4792]: I1202 18:42:09.312416 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7f230c0c-67ec-4d94-804d-aed590ed041d-trusted-ca\") pod \"image-registry-66df7c8f76-zszzb\" (UID: \"7f230c0c-67ec-4d94-804d-aed590ed041d\") " pod="openshift-image-registry/image-registry-66df7c8f76-zszzb" Dec 02 18:42:09 crc kubenswrapper[4792]: I1202 18:42:09.312457 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/7f230c0c-67ec-4d94-804d-aed590ed041d-ca-trust-extracted\") pod \"image-registry-66df7c8f76-zszzb\" (UID: \"7f230c0c-67ec-4d94-804d-aed590ed041d\") " pod="openshift-image-registry/image-registry-66df7c8f76-zszzb" Dec 02 18:42:09 crc kubenswrapper[4792]: I1202 18:42:09.312489 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/7f230c0c-67ec-4d94-804d-aed590ed041d-registry-certificates\") pod \"image-registry-66df7c8f76-zszzb\" (UID: \"7f230c0c-67ec-4d94-804d-aed590ed041d\") " pod="openshift-image-registry/image-registry-66df7c8f76-zszzb" Dec 02 18:42:09 crc kubenswrapper[4792]: I1202 18:42:09.312629 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7f230c0c-67ec-4d94-804d-aed590ed041d-bound-sa-token\") pod \"image-registry-66df7c8f76-zszzb\" (UID: \"7f230c0c-67ec-4d94-804d-aed590ed041d\") " pod="openshift-image-registry/image-registry-66df7c8f76-zszzb" Dec 02 18:42:09 crc kubenswrapper[4792]: I1202 18:42:09.312773 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-zszzb\" (UID: \"7f230c0c-67ec-4d94-804d-aed590ed041d\") " pod="openshift-image-registry/image-registry-66df7c8f76-zszzb" Dec 02 18:42:09 crc kubenswrapper[4792]: I1202 18:42:09.312794 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-68bzc\" (UniqueName: \"kubernetes.io/projected/7f230c0c-67ec-4d94-804d-aed590ed041d-kube-api-access-68bzc\") pod \"image-registry-66df7c8f76-zszzb\" (UID: \"7f230c0c-67ec-4d94-804d-aed590ed041d\") " pod="openshift-image-registry/image-registry-66df7c8f76-zszzb" Dec 02 18:42:09 crc kubenswrapper[4792]: I1202 18:42:09.312850 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: 
\"kubernetes.io/secret/7f230c0c-67ec-4d94-804d-aed590ed041d-installation-pull-secrets\") pod \"image-registry-66df7c8f76-zszzb\" (UID: \"7f230c0c-67ec-4d94-804d-aed590ed041d\") " pod="openshift-image-registry/image-registry-66df7c8f76-zszzb" Dec 02 18:42:09 crc kubenswrapper[4792]: I1202 18:42:09.341171 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-zszzb\" (UID: \"7f230c0c-67ec-4d94-804d-aed590ed041d\") " pod="openshift-image-registry/image-registry-66df7c8f76-zszzb" Dec 02 18:42:09 crc kubenswrapper[4792]: I1202 18:42:09.414615 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/7f230c0c-67ec-4d94-804d-aed590ed041d-registry-certificates\") pod \"image-registry-66df7c8f76-zszzb\" (UID: \"7f230c0c-67ec-4d94-804d-aed590ed041d\") " pod="openshift-image-registry/image-registry-66df7c8f76-zszzb" Dec 02 18:42:09 crc kubenswrapper[4792]: I1202 18:42:09.414712 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7f230c0c-67ec-4d94-804d-aed590ed041d-bound-sa-token\") pod \"image-registry-66df7c8f76-zszzb\" (UID: \"7f230c0c-67ec-4d94-804d-aed590ed041d\") " pod="openshift-image-registry/image-registry-66df7c8f76-zszzb" Dec 02 18:42:09 crc kubenswrapper[4792]: I1202 18:42:09.414773 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-68bzc\" (UniqueName: \"kubernetes.io/projected/7f230c0c-67ec-4d94-804d-aed590ed041d-kube-api-access-68bzc\") pod \"image-registry-66df7c8f76-zszzb\" (UID: \"7f230c0c-67ec-4d94-804d-aed590ed041d\") " pod="openshift-image-registry/image-registry-66df7c8f76-zszzb" Dec 02 18:42:09 crc kubenswrapper[4792]: I1202 18:42:09.414813 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/7f230c0c-67ec-4d94-804d-aed590ed041d-installation-pull-secrets\") pod \"image-registry-66df7c8f76-zszzb\" (UID: \"7f230c0c-67ec-4d94-804d-aed590ed041d\") " pod="openshift-image-registry/image-registry-66df7c8f76-zszzb" Dec 02 18:42:09 crc kubenswrapper[4792]: I1202 18:42:09.414853 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/7f230c0c-67ec-4d94-804d-aed590ed041d-registry-tls\") pod \"image-registry-66df7c8f76-zszzb\" (UID: \"7f230c0c-67ec-4d94-804d-aed590ed041d\") " pod="openshift-image-registry/image-registry-66df7c8f76-zszzb" Dec 02 18:42:09 crc kubenswrapper[4792]: I1202 18:42:09.414873 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7f230c0c-67ec-4d94-804d-aed590ed041d-trusted-ca\") pod \"image-registry-66df7c8f76-zszzb\" (UID: \"7f230c0c-67ec-4d94-804d-aed590ed041d\") " pod="openshift-image-registry/image-registry-66df7c8f76-zszzb" Dec 02 18:42:09 crc kubenswrapper[4792]: I1202 18:42:09.414911 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/7f230c0c-67ec-4d94-804d-aed590ed041d-ca-trust-extracted\") pod \"image-registry-66df7c8f76-zszzb\" (UID: \"7f230c0c-67ec-4d94-804d-aed590ed041d\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-zszzb" Dec 02 18:42:09 crc kubenswrapper[4792]: I1202 18:42:09.415471 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/7f230c0c-67ec-4d94-804d-aed590ed041d-ca-trust-extracted\") pod \"image-registry-66df7c8f76-zszzb\" (UID: \"7f230c0c-67ec-4d94-804d-aed590ed041d\") " pod="openshift-image-registry/image-registry-66df7c8f76-zszzb" Dec 02 18:42:09 crc kubenswrapper[4792]: I1202 18:42:09.416180 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/7f230c0c-67ec-4d94-804d-aed590ed041d-registry-certificates\") pod \"image-registry-66df7c8f76-zszzb\" (UID: \"7f230c0c-67ec-4d94-804d-aed590ed041d\") " pod="openshift-image-registry/image-registry-66df7c8f76-zszzb" Dec 02 18:42:09 crc kubenswrapper[4792]: I1202 18:42:09.419936 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7f230c0c-67ec-4d94-804d-aed590ed041d-trusted-ca\") pod \"image-registry-66df7c8f76-zszzb\" (UID: \"7f230c0c-67ec-4d94-804d-aed590ed041d\") " pod="openshift-image-registry/image-registry-66df7c8f76-zszzb" Dec 02 18:42:09 crc kubenswrapper[4792]: I1202 18:42:09.425432 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/7f230c0c-67ec-4d94-804d-aed590ed041d-registry-tls\") pod \"image-registry-66df7c8f76-zszzb\" (UID: \"7f230c0c-67ec-4d94-804d-aed590ed041d\") " pod="openshift-image-registry/image-registry-66df7c8f76-zszzb" Dec 02 18:42:09 crc kubenswrapper[4792]: I1202 18:42:09.429140 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/7f230c0c-67ec-4d94-804d-aed590ed041d-installation-pull-secrets\") pod \"image-registry-66df7c8f76-zszzb\" (UID: \"7f230c0c-67ec-4d94-804d-aed590ed041d\") " pod="openshift-image-registry/image-registry-66df7c8f76-zszzb" Dec 02 18:42:09 crc kubenswrapper[4792]: I1202 18:42:09.439176 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-68bzc\" (UniqueName: \"kubernetes.io/projected/7f230c0c-67ec-4d94-804d-aed590ed041d-kube-api-access-68bzc\") pod \"image-registry-66df7c8f76-zszzb\" (UID: \"7f230c0c-67ec-4d94-804d-aed590ed041d\") " pod="openshift-image-registry/image-registry-66df7c8f76-zszzb" Dec 02 18:42:09 crc kubenswrapper[4792]: I1202 18:42:09.441432 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7f230c0c-67ec-4d94-804d-aed590ed041d-bound-sa-token\") pod \"image-registry-66df7c8f76-zszzb\" (UID: \"7f230c0c-67ec-4d94-804d-aed590ed041d\") " pod="openshift-image-registry/image-registry-66df7c8f76-zszzb" Dec 02 18:42:09 crc kubenswrapper[4792]: I1202 18:42:09.604838 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-zszzb" Dec 02 18:42:10 crc kubenswrapper[4792]: I1202 18:42:10.038848 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-zszzb"] Dec 02 18:42:10 crc kubenswrapper[4792]: W1202 18:42:10.042609 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7f230c0c_67ec_4d94_804d_aed590ed041d.slice/crio-08eab1297a6f1c3bb35f9d53ec5d41ffb9b1a7472bae7123a3f4551eb18ecc52 WatchSource:0}: Error finding container 08eab1297a6f1c3bb35f9d53ec5d41ffb9b1a7472bae7123a3f4551eb18ecc52: Status 404 returned error can't find the container with id 08eab1297a6f1c3bb35f9d53ec5d41ffb9b1a7472bae7123a3f4551eb18ecc52 Dec 02 18:42:10 crc kubenswrapper[4792]: I1202 18:42:10.366029 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-zszzb" event={"ID":"7f230c0c-67ec-4d94-804d-aed590ed041d","Type":"ContainerStarted","Data":"08eab1297a6f1c3bb35f9d53ec5d41ffb9b1a7472bae7123a3f4551eb18ecc52"} Dec 02 18:42:11 crc kubenswrapper[4792]: I1202 18:42:11.374616 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-zszzb" event={"ID":"7f230c0c-67ec-4d94-804d-aed590ed041d","Type":"ContainerStarted","Data":"0fce76036dc746a66e4a504d2a1f26d8fbe6495cba761ffc95b90100c4e78d39"} Dec 02 18:42:12 crc kubenswrapper[4792]: I1202 18:42:12.381777 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-zszzb" Dec 02 18:42:12 crc kubenswrapper[4792]: I1202 18:42:12.417515 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-zszzb" podStartSLOduration=3.417484864 podStartE2EDuration="3.417484864s" podCreationTimestamp="2025-12-02 18:42:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:42:12.416221298 +0000 UTC m=+363.189113626" watchObservedRunningTime="2025-12-02 18:42:12.417484864 +0000 UTC m=+363.190377242" Dec 02 18:42:22 crc kubenswrapper[4792]: I1202 18:42:22.831725 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7bcdf8b5f6-vlzft"] Dec 02 18:42:22 crc kubenswrapper[4792]: I1202 18:42:22.832996 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-7bcdf8b5f6-vlzft" podUID="87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89" containerName="controller-manager" containerID="cri-o://ed3aa317af8d34313a314d517393c8968541aeabc3f4bf812f084c5411081231" gracePeriod=30 Dec 02 18:42:23 crc kubenswrapper[4792]: I1202 18:42:23.771839 4792 generic.go:334] "Generic (PLEG): container finished" podID="87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89" containerID="ed3aa317af8d34313a314d517393c8968541aeabc3f4bf812f084c5411081231" exitCode=0 Dec 02 18:42:23 crc kubenswrapper[4792]: I1202 18:42:23.771917 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7bcdf8b5f6-vlzft" event={"ID":"87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89","Type":"ContainerDied","Data":"ed3aa317af8d34313a314d517393c8968541aeabc3f4bf812f084c5411081231"} Dec 02 18:42:24 crc kubenswrapper[4792]: I1202 18:42:24.058931 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7bcdf8b5f6-vlzft" Dec 02 18:42:24 crc kubenswrapper[4792]: I1202 18:42:24.124334 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-68bbf7cfcc-h8fs5"] Dec 02 18:42:24 crc kubenswrapper[4792]: E1202 18:42:24.125218 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89" containerName="controller-manager" Dec 02 18:42:24 crc kubenswrapper[4792]: I1202 18:42:24.125251 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89" containerName="controller-manager" Dec 02 18:42:24 crc kubenswrapper[4792]: I1202 18:42:24.125751 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89" containerName="controller-manager" Dec 02 18:42:24 crc kubenswrapper[4792]: I1202 18:42:24.130134 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-68bbf7cfcc-h8fs5" Dec 02 18:42:24 crc kubenswrapper[4792]: I1202 18:42:24.141164 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-68bbf7cfcc-h8fs5"] Dec 02 18:42:24 crc kubenswrapper[4792]: I1202 18:42:24.195740 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfhm4\" (UniqueName: \"kubernetes.io/projected/87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89-kube-api-access-cfhm4\") pod \"87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89\" (UID: \"87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89\") " Dec 02 18:42:24 crc kubenswrapper[4792]: I1202 18:42:24.195951 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89-serving-cert\") pod \"87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89\" (UID: \"87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89\") " Dec 02 18:42:24 crc kubenswrapper[4792]: I1202 18:42:24.196001 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89-client-ca\") pod \"87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89\" (UID: \"87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89\") " Dec 02 18:42:24 crc kubenswrapper[4792]: I1202 18:42:24.196052 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89-config\") pod \"87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89\" (UID: \"87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89\") " Dec 02 18:42:24 crc kubenswrapper[4792]: I1202 18:42:24.196080 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89-proxy-ca-bundles\") pod \"87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89\" (UID: \"87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89\") " Dec 02 18:42:24 crc kubenswrapper[4792]: I1202 18:42:24.196371 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6bf8b456-82d5-4566-8534-3f810e710475-proxy-ca-bundles\") pod \"controller-manager-68bbf7cfcc-h8fs5\" (UID: \"6bf8b456-82d5-4566-8534-3f810e710475\") " pod="openshift-controller-manager/controller-manager-68bbf7cfcc-h8fs5" Dec 02 18:42:24 crc kubenswrapper[4792]: I1202 18:42:24.196426 
4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p2ssm\" (UniqueName: \"kubernetes.io/projected/6bf8b456-82d5-4566-8534-3f810e710475-kube-api-access-p2ssm\") pod \"controller-manager-68bbf7cfcc-h8fs5\" (UID: \"6bf8b456-82d5-4566-8534-3f810e710475\") " pod="openshift-controller-manager/controller-manager-68bbf7cfcc-h8fs5" Dec 02 18:42:24 crc kubenswrapper[4792]: I1202 18:42:24.196452 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6bf8b456-82d5-4566-8534-3f810e710475-client-ca\") pod \"controller-manager-68bbf7cfcc-h8fs5\" (UID: \"6bf8b456-82d5-4566-8534-3f810e710475\") " pod="openshift-controller-manager/controller-manager-68bbf7cfcc-h8fs5" Dec 02 18:42:24 crc kubenswrapper[4792]: I1202 18:42:24.196482 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6bf8b456-82d5-4566-8534-3f810e710475-config\") pod \"controller-manager-68bbf7cfcc-h8fs5\" (UID: \"6bf8b456-82d5-4566-8534-3f810e710475\") " pod="openshift-controller-manager/controller-manager-68bbf7cfcc-h8fs5" Dec 02 18:42:24 crc kubenswrapper[4792]: I1202 18:42:24.196540 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6bf8b456-82d5-4566-8534-3f810e710475-serving-cert\") pod \"controller-manager-68bbf7cfcc-h8fs5\" (UID: \"6bf8b456-82d5-4566-8534-3f810e710475\") " pod="openshift-controller-manager/controller-manager-68bbf7cfcc-h8fs5" Dec 02 18:42:24 crc kubenswrapper[4792]: I1202 18:42:24.197676 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89-client-ca" (OuterVolumeSpecName: "client-ca") pod "87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89" (UID: "87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:42:24 crc kubenswrapper[4792]: I1202 18:42:24.197790 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89" (UID: "87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:42:24 crc kubenswrapper[4792]: I1202 18:42:24.197964 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89-config" (OuterVolumeSpecName: "config") pod "87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89" (UID: "87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:42:24 crc kubenswrapper[4792]: I1202 18:42:24.204358 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89" (UID: "87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:42:24 crc kubenswrapper[4792]: I1202 18:42:24.217976 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89-kube-api-access-cfhm4" (OuterVolumeSpecName: "kube-api-access-cfhm4") pod "87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89" (UID: "87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89"). InnerVolumeSpecName "kube-api-access-cfhm4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:42:24 crc kubenswrapper[4792]: I1202 18:42:24.297503 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6bf8b456-82d5-4566-8534-3f810e710475-serving-cert\") pod \"controller-manager-68bbf7cfcc-h8fs5\" (UID: \"6bf8b456-82d5-4566-8534-3f810e710475\") " pod="openshift-controller-manager/controller-manager-68bbf7cfcc-h8fs5" Dec 02 18:42:24 crc kubenswrapper[4792]: I1202 18:42:24.297624 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6bf8b456-82d5-4566-8534-3f810e710475-proxy-ca-bundles\") pod \"controller-manager-68bbf7cfcc-h8fs5\" (UID: \"6bf8b456-82d5-4566-8534-3f810e710475\") " pod="openshift-controller-manager/controller-manager-68bbf7cfcc-h8fs5" Dec 02 18:42:24 crc kubenswrapper[4792]: I1202 18:42:24.297666 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p2ssm\" (UniqueName: \"kubernetes.io/projected/6bf8b456-82d5-4566-8534-3f810e710475-kube-api-access-p2ssm\") pod \"controller-manager-68bbf7cfcc-h8fs5\" (UID: \"6bf8b456-82d5-4566-8534-3f810e710475\") " pod="openshift-controller-manager/controller-manager-68bbf7cfcc-h8fs5" Dec 02 18:42:24 crc kubenswrapper[4792]: I1202 18:42:24.297692 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6bf8b456-82d5-4566-8534-3f810e710475-client-ca\") pod \"controller-manager-68bbf7cfcc-h8fs5\" (UID: \"6bf8b456-82d5-4566-8534-3f810e710475\") " pod="openshift-controller-manager/controller-manager-68bbf7cfcc-h8fs5" Dec 02 18:42:24 crc kubenswrapper[4792]: I1202 18:42:24.297718 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6bf8b456-82d5-4566-8534-3f810e710475-config\") pod \"controller-manager-68bbf7cfcc-h8fs5\" (UID: \"6bf8b456-82d5-4566-8534-3f810e710475\") " pod="openshift-controller-manager/controller-manager-68bbf7cfcc-h8fs5" Dec 02 18:42:24 crc kubenswrapper[4792]: I1202 18:42:24.297763 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfhm4\" (UniqueName: \"kubernetes.io/projected/87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89-kube-api-access-cfhm4\") on node \"crc\" DevicePath \"\"" Dec 02 18:42:24 crc kubenswrapper[4792]: I1202 18:42:24.297776 4792 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 18:42:24 crc kubenswrapper[4792]: I1202 18:42:24.297919 4792 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89-client-ca\") on node \"crc\" DevicePath \"\"" Dec 02 18:42:24 crc kubenswrapper[4792]: I1202 18:42:24.299177 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"client-ca\" (UniqueName: \"kubernetes.io/configmap/6bf8b456-82d5-4566-8534-3f810e710475-client-ca\") pod \"controller-manager-68bbf7cfcc-h8fs5\" (UID: \"6bf8b456-82d5-4566-8534-3f810e710475\") " pod="openshift-controller-manager/controller-manager-68bbf7cfcc-h8fs5" Dec 02 18:42:24 crc kubenswrapper[4792]: I1202 18:42:24.299797 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6bf8b456-82d5-4566-8534-3f810e710475-proxy-ca-bundles\") pod \"controller-manager-68bbf7cfcc-h8fs5\" (UID: \"6bf8b456-82d5-4566-8534-3f810e710475\") " pod="openshift-controller-manager/controller-manager-68bbf7cfcc-h8fs5" Dec 02 18:42:24 crc kubenswrapper[4792]: I1202 18:42:24.300290 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:42:24 crc kubenswrapper[4792]: I1202 18:42:24.300329 4792 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 02 18:42:24 crc kubenswrapper[4792]: I1202 18:42:24.300288 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6bf8b456-82d5-4566-8534-3f810e710475-config\") pod \"controller-manager-68bbf7cfcc-h8fs5\" (UID: \"6bf8b456-82d5-4566-8534-3f810e710475\") " pod="openshift-controller-manager/controller-manager-68bbf7cfcc-h8fs5" Dec 02 18:42:24 crc kubenswrapper[4792]: I1202 18:42:24.303074 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6bf8b456-82d5-4566-8534-3f810e710475-serving-cert\") pod \"controller-manager-68bbf7cfcc-h8fs5\" (UID: \"6bf8b456-82d5-4566-8534-3f810e710475\") " pod="openshift-controller-manager/controller-manager-68bbf7cfcc-h8fs5" Dec 02 18:42:24 crc kubenswrapper[4792]: I1202 18:42:24.317849 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p2ssm\" (UniqueName: \"kubernetes.io/projected/6bf8b456-82d5-4566-8534-3f810e710475-kube-api-access-p2ssm\") pod \"controller-manager-68bbf7cfcc-h8fs5\" (UID: \"6bf8b456-82d5-4566-8534-3f810e710475\") " pod="openshift-controller-manager/controller-manager-68bbf7cfcc-h8fs5" Dec 02 18:42:24 crc kubenswrapper[4792]: I1202 18:42:24.457275 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-68bbf7cfcc-h8fs5" Dec 02 18:42:24 crc kubenswrapper[4792]: I1202 18:42:24.784835 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7bcdf8b5f6-vlzft" event={"ID":"87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89","Type":"ContainerDied","Data":"d35af30ab960a37ec3aa56a771f4d8619668c16cbd8a11a07586bb9f16f14355"} Dec 02 18:42:24 crc kubenswrapper[4792]: I1202 18:42:24.785292 4792 scope.go:117] "RemoveContainer" containerID="ed3aa317af8d34313a314d517393c8968541aeabc3f4bf812f084c5411081231" Dec 02 18:42:24 crc kubenswrapper[4792]: I1202 18:42:24.784923 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7bcdf8b5f6-vlzft" Dec 02 18:42:24 crc kubenswrapper[4792]: I1202 18:42:24.828706 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7bcdf8b5f6-vlzft"] Dec 02 18:42:24 crc kubenswrapper[4792]: I1202 18:42:24.836366 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-7bcdf8b5f6-vlzft"] Dec 02 18:42:24 crc kubenswrapper[4792]: I1202 18:42:24.964957 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-68bbf7cfcc-h8fs5"] Dec 02 18:42:24 crc kubenswrapper[4792]: W1202 18:42:24.971322 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6bf8b456_82d5_4566_8534_3f810e710475.slice/crio-37ea8df68cc0eb4f6634aa8340a03ef43cbc5cb9c6871de035dc3bb72aedf097 WatchSource:0}: Error finding container 37ea8df68cc0eb4f6634aa8340a03ef43cbc5cb9c6871de035dc3bb72aedf097: Status 404 returned error can't find the container with id 37ea8df68cc0eb4f6634aa8340a03ef43cbc5cb9c6871de035dc3bb72aedf097 Dec 02 18:42:25 crc kubenswrapper[4792]: I1202 18:42:25.549836 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89" path="/var/lib/kubelet/pods/87d70b6e-efd5-40b0-be2b-fe6ccbdc3a89/volumes" Dec 02 18:42:25 crc kubenswrapper[4792]: I1202 18:42:25.797222 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-68bbf7cfcc-h8fs5" event={"ID":"6bf8b456-82d5-4566-8534-3f810e710475","Type":"ContainerStarted","Data":"c214da8f44b65954827127e7b7fca563059b57f8d3068b2a2098a91d7b4ee434"} Dec 02 18:42:25 crc kubenswrapper[4792]: I1202 18:42:25.797655 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-68bbf7cfcc-h8fs5" Dec 02 18:42:25 crc kubenswrapper[4792]: I1202 18:42:25.797679 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-68bbf7cfcc-h8fs5" event={"ID":"6bf8b456-82d5-4566-8534-3f810e710475","Type":"ContainerStarted","Data":"37ea8df68cc0eb4f6634aa8340a03ef43cbc5cb9c6871de035dc3bb72aedf097"} Dec 02 18:42:25 crc kubenswrapper[4792]: I1202 18:42:25.806981 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-68bbf7cfcc-h8fs5" Dec 02 18:42:25 crc kubenswrapper[4792]: I1202 18:42:25.826643 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-68bbf7cfcc-h8fs5" podStartSLOduration=3.8266085690000002 podStartE2EDuration="3.826608569s" podCreationTimestamp="2025-12-02 18:42:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:42:25.82079771 +0000 UTC m=+376.593690098" watchObservedRunningTime="2025-12-02 18:42:25.826608569 +0000 UTC m=+376.599500917" Dec 02 18:42:29 crc kubenswrapper[4792]: I1202 18:42:29.612339 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-zszzb" Dec 02 18:42:29 crc kubenswrapper[4792]: I1202 18:42:29.696867 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-nr8ml"] Dec 02 18:42:36 crc 
kubenswrapper[4792]: I1202 18:42:36.055476 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-p6vhp"] Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.056713 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-p6vhp" podUID="bba4322e-397d-4b6a-b52c-14dfeecbf071" containerName="registry-server" containerID="cri-o://630af089f94974c9797a52909316c899823f65a1a43d044168624e35d7723606" gracePeriod=30 Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.060368 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-z4jbn"] Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.060996 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-z4jbn" podUID="77b66620-e883-40d8-8294-b6b4a2f3ad8c" containerName="registry-server" containerID="cri-o://8bebd34a8e4011cb5b5ac971551e3e2aa6a28e093921fc7118dbbd252530852f" gracePeriod=30 Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.066171 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-nzgzn"] Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.066553 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-nzgzn" podUID="ab249412-8c3a-4e5f-b84a-374b19cc1dc9" containerName="marketplace-operator" containerID="cri-o://824c076dc9f4885cd57f68c223cc4a78d0e3dac504b4a0be7cb0952f18f3ef45" gracePeriod=30 Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.079679 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hrlnq"] Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.080004 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-hrlnq" podUID="c118dcaa-7c35-4a91-9b16-b3796e95fa86" containerName="registry-server" containerID="cri-o://505ec2e14d0f3560ae3a57d9aa8f932f8fa18afcabcbe48bdb2d3ecdc72bee2e" gracePeriod=30 Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.095412 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-c85tx"] Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.095789 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-c85tx" podUID="715f891b-4e52-454b-b5c0-22694ef088e8" containerName="registry-server" containerID="cri-o://367ef52fc282d50cafbe486c37d9a0fce61c808e0cf6f2a19a5e5177f6f220f4" gracePeriod=30 Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.100290 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-7r9rs"] Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.110354 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-7r9rs" Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.120501 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-7r9rs"] Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.209734 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/1ed6fdc1-3384-4b3a-92a3-fe3c5a50c21e-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-7r9rs\" (UID: \"1ed6fdc1-3384-4b3a-92a3-fe3c5a50c21e\") " pod="openshift-marketplace/marketplace-operator-79b997595-7r9rs" Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.209895 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1ed6fdc1-3384-4b3a-92a3-fe3c5a50c21e-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-7r9rs\" (UID: \"1ed6fdc1-3384-4b3a-92a3-fe3c5a50c21e\") " pod="openshift-marketplace/marketplace-operator-79b997595-7r9rs" Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.210176 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wjtkv\" (UniqueName: \"kubernetes.io/projected/1ed6fdc1-3384-4b3a-92a3-fe3c5a50c21e-kube-api-access-wjtkv\") pod \"marketplace-operator-79b997595-7r9rs\" (UID: \"1ed6fdc1-3384-4b3a-92a3-fe3c5a50c21e\") " pod="openshift-marketplace/marketplace-operator-79b997595-7r9rs" Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.312327 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1ed6fdc1-3384-4b3a-92a3-fe3c5a50c21e-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-7r9rs\" (UID: \"1ed6fdc1-3384-4b3a-92a3-fe3c5a50c21e\") " pod="openshift-marketplace/marketplace-operator-79b997595-7r9rs" Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.312408 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wjtkv\" (UniqueName: \"kubernetes.io/projected/1ed6fdc1-3384-4b3a-92a3-fe3c5a50c21e-kube-api-access-wjtkv\") pod \"marketplace-operator-79b997595-7r9rs\" (UID: \"1ed6fdc1-3384-4b3a-92a3-fe3c5a50c21e\") " pod="openshift-marketplace/marketplace-operator-79b997595-7r9rs" Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.312442 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/1ed6fdc1-3384-4b3a-92a3-fe3c5a50c21e-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-7r9rs\" (UID: \"1ed6fdc1-3384-4b3a-92a3-fe3c5a50c21e\") " pod="openshift-marketplace/marketplace-operator-79b997595-7r9rs" Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.314284 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1ed6fdc1-3384-4b3a-92a3-fe3c5a50c21e-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-7r9rs\" (UID: \"1ed6fdc1-3384-4b3a-92a3-fe3c5a50c21e\") " pod="openshift-marketplace/marketplace-operator-79b997595-7r9rs" Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.321498 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/1ed6fdc1-3384-4b3a-92a3-fe3c5a50c21e-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-7r9rs\" (UID: \"1ed6fdc1-3384-4b3a-92a3-fe3c5a50c21e\") " pod="openshift-marketplace/marketplace-operator-79b997595-7r9rs" Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.330697 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wjtkv\" (UniqueName: \"kubernetes.io/projected/1ed6fdc1-3384-4b3a-92a3-fe3c5a50c21e-kube-api-access-wjtkv\") pod \"marketplace-operator-79b997595-7r9rs\" (UID: \"1ed6fdc1-3384-4b3a-92a3-fe3c5a50c21e\") " pod="openshift-marketplace/marketplace-operator-79b997595-7r9rs" Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.576635 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-7r9rs" Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.670434 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hrlnq" Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.717752 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c118dcaa-7c35-4a91-9b16-b3796e95fa86-catalog-content\") pod \"c118dcaa-7c35-4a91-9b16-b3796e95fa86\" (UID: \"c118dcaa-7c35-4a91-9b16-b3796e95fa86\") " Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.717859 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c118dcaa-7c35-4a91-9b16-b3796e95fa86-utilities\") pod \"c118dcaa-7c35-4a91-9b16-b3796e95fa86\" (UID: \"c118dcaa-7c35-4a91-9b16-b3796e95fa86\") " Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.717948 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9ws67\" (UniqueName: \"kubernetes.io/projected/c118dcaa-7c35-4a91-9b16-b3796e95fa86-kube-api-access-9ws67\") pod \"c118dcaa-7c35-4a91-9b16-b3796e95fa86\" (UID: \"c118dcaa-7c35-4a91-9b16-b3796e95fa86\") " Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.718888 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c118dcaa-7c35-4a91-9b16-b3796e95fa86-utilities" (OuterVolumeSpecName: "utilities") pod "c118dcaa-7c35-4a91-9b16-b3796e95fa86" (UID: "c118dcaa-7c35-4a91-9b16-b3796e95fa86"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.723122 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c118dcaa-7c35-4a91-9b16-b3796e95fa86-kube-api-access-9ws67" (OuterVolumeSpecName: "kube-api-access-9ws67") pod "c118dcaa-7c35-4a91-9b16-b3796e95fa86" (UID: "c118dcaa-7c35-4a91-9b16-b3796e95fa86"). InnerVolumeSpecName "kube-api-access-9ws67". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.741503 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c118dcaa-7c35-4a91-9b16-b3796e95fa86-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c118dcaa-7c35-4a91-9b16-b3796e95fa86" (UID: "c118dcaa-7c35-4a91-9b16-b3796e95fa86"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.747506 4792 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-nzgzn container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/healthz\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.747591 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-nzgzn" podUID="ab249412-8c3a-4e5f-b84a-374b19cc1dc9" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.15:8080/healthz\": dial tcp 10.217.0.15:8080: connect: connection refused" Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.787258 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-c85tx" Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.800053 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-z4jbn" Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.812714 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-p6vhp" Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.819204 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c118dcaa-7c35-4a91-9b16-b3796e95fa86-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.819223 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c118dcaa-7c35-4a91-9b16-b3796e95fa86-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.819233 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9ws67\" (UniqueName: \"kubernetes.io/projected/c118dcaa-7c35-4a91-9b16-b3796e95fa86-kube-api-access-9ws67\") on node \"crc\" DevicePath \"\"" Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.883323 4792 generic.go:334] "Generic (PLEG): container finished" podID="715f891b-4e52-454b-b5c0-22694ef088e8" containerID="367ef52fc282d50cafbe486c37d9a0fce61c808e0cf6f2a19a5e5177f6f220f4" exitCode=0 Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.883407 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c85tx" event={"ID":"715f891b-4e52-454b-b5c0-22694ef088e8","Type":"ContainerDied","Data":"367ef52fc282d50cafbe486c37d9a0fce61c808e0cf6f2a19a5e5177f6f220f4"} Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.883414 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-c85tx" Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.883443 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c85tx" event={"ID":"715f891b-4e52-454b-b5c0-22694ef088e8","Type":"ContainerDied","Data":"c6cee850e07151adb91fbece71dc354728d6a16354f945fb539951933e2e498d"} Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.883465 4792 scope.go:117] "RemoveContainer" containerID="367ef52fc282d50cafbe486c37d9a0fce61c808e0cf6f2a19a5e5177f6f220f4" Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.888384 4792 generic.go:334] "Generic (PLEG): container finished" podID="c118dcaa-7c35-4a91-9b16-b3796e95fa86" containerID="505ec2e14d0f3560ae3a57d9aa8f932f8fa18afcabcbe48bdb2d3ecdc72bee2e" exitCode=0 Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.888443 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hrlnq" Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.888482 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hrlnq" event={"ID":"c118dcaa-7c35-4a91-9b16-b3796e95fa86","Type":"ContainerDied","Data":"505ec2e14d0f3560ae3a57d9aa8f932f8fa18afcabcbe48bdb2d3ecdc72bee2e"} Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.888516 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hrlnq" event={"ID":"c118dcaa-7c35-4a91-9b16-b3796e95fa86","Type":"ContainerDied","Data":"9d6bfb666b1b5efc1beac839c2ce935311f349c8ee335b2e4f1e91e77ebeb3d7"} Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.895812 4792 generic.go:334] "Generic (PLEG): container finished" podID="bba4322e-397d-4b6a-b52c-14dfeecbf071" containerID="630af089f94974c9797a52909316c899823f65a1a43d044168624e35d7723606" exitCode=0 Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.895897 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-p6vhp" Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.895900 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p6vhp" event={"ID":"bba4322e-397d-4b6a-b52c-14dfeecbf071","Type":"ContainerDied","Data":"630af089f94974c9797a52909316c899823f65a1a43d044168624e35d7723606"} Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.896431 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p6vhp" event={"ID":"bba4322e-397d-4b6a-b52c-14dfeecbf071","Type":"ContainerDied","Data":"f26e5b950ce6a1657f6bc0e3c4ab8103cb57a2c97e281c4ac4a7b6bdc0a824d8"} Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.902943 4792 generic.go:334] "Generic (PLEG): container finished" podID="ab249412-8c3a-4e5f-b84a-374b19cc1dc9" containerID="824c076dc9f4885cd57f68c223cc4a78d0e3dac504b4a0be7cb0952f18f3ef45" exitCode=0 Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.903019 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-nzgzn" event={"ID":"ab249412-8c3a-4e5f-b84a-374b19cc1dc9","Type":"ContainerDied","Data":"824c076dc9f4885cd57f68c223cc4a78d0e3dac504b4a0be7cb0952f18f3ef45"} Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.911403 4792 scope.go:117] "RemoveContainer" containerID="5466f2cd997f197a84e922fad7e87e31254cd10ed751caec26f53eb8e05496a7" Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.912043 4792 generic.go:334] "Generic (PLEG): container finished" podID="77b66620-e883-40d8-8294-b6b4a2f3ad8c" containerID="8bebd34a8e4011cb5b5ac971551e3e2aa6a28e093921fc7118dbbd252530852f" exitCode=0 Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.912082 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z4jbn" event={"ID":"77b66620-e883-40d8-8294-b6b4a2f3ad8c","Type":"ContainerDied","Data":"8bebd34a8e4011cb5b5ac971551e3e2aa6a28e093921fc7118dbbd252530852f"} Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.912114 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z4jbn" event={"ID":"77b66620-e883-40d8-8294-b6b4a2f3ad8c","Type":"ContainerDied","Data":"fb0d3350b33429eee2e7735c63c8f8c805033923e2529a44316727ea629ceaf7"} Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.913143 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-z4jbn" Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.921588 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/715f891b-4e52-454b-b5c0-22694ef088e8-catalog-content\") pod \"715f891b-4e52-454b-b5c0-22694ef088e8\" (UID: \"715f891b-4e52-454b-b5c0-22694ef088e8\") " Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.921639 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bba4322e-397d-4b6a-b52c-14dfeecbf071-utilities\") pod \"bba4322e-397d-4b6a-b52c-14dfeecbf071\" (UID: \"bba4322e-397d-4b6a-b52c-14dfeecbf071\") " Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.921745 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77b66620-e883-40d8-8294-b6b4a2f3ad8c-catalog-content\") pod \"77b66620-e883-40d8-8294-b6b4a2f3ad8c\" (UID: \"77b66620-e883-40d8-8294-b6b4a2f3ad8c\") " Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.921805 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dhrp7\" (UniqueName: \"kubernetes.io/projected/77b66620-e883-40d8-8294-b6b4a2f3ad8c-kube-api-access-dhrp7\") pod \"77b66620-e883-40d8-8294-b6b4a2f3ad8c\" (UID: \"77b66620-e883-40d8-8294-b6b4a2f3ad8c\") " Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.921837 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/715f891b-4e52-454b-b5c0-22694ef088e8-utilities\") pod \"715f891b-4e52-454b-b5c0-22694ef088e8\" (UID: \"715f891b-4e52-454b-b5c0-22694ef088e8\") " Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.921868 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bba4322e-397d-4b6a-b52c-14dfeecbf071-catalog-content\") pod \"bba4322e-397d-4b6a-b52c-14dfeecbf071\" (UID: \"bba4322e-397d-4b6a-b52c-14dfeecbf071\") " Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.921902 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xxgtv\" (UniqueName: \"kubernetes.io/projected/715f891b-4e52-454b-b5c0-22694ef088e8-kube-api-access-xxgtv\") pod \"715f891b-4e52-454b-b5c0-22694ef088e8\" (UID: \"715f891b-4e52-454b-b5c0-22694ef088e8\") " Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.921928 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qbtjp\" (UniqueName: \"kubernetes.io/projected/bba4322e-397d-4b6a-b52c-14dfeecbf071-kube-api-access-qbtjp\") pod \"bba4322e-397d-4b6a-b52c-14dfeecbf071\" (UID: \"bba4322e-397d-4b6a-b52c-14dfeecbf071\") " Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.921945 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77b66620-e883-40d8-8294-b6b4a2f3ad8c-utilities\") pod \"77b66620-e883-40d8-8294-b6b4a2f3ad8c\" (UID: \"77b66620-e883-40d8-8294-b6b4a2f3ad8c\") " Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.923863 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/77b66620-e883-40d8-8294-b6b4a2f3ad8c-utilities" (OuterVolumeSpecName: "utilities") pod 
"77b66620-e883-40d8-8294-b6b4a2f3ad8c" (UID: "77b66620-e883-40d8-8294-b6b4a2f3ad8c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.925834 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/715f891b-4e52-454b-b5c0-22694ef088e8-utilities" (OuterVolumeSpecName: "utilities") pod "715f891b-4e52-454b-b5c0-22694ef088e8" (UID: "715f891b-4e52-454b-b5c0-22694ef088e8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.928182 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bba4322e-397d-4b6a-b52c-14dfeecbf071-utilities" (OuterVolumeSpecName: "utilities") pod "bba4322e-397d-4b6a-b52c-14dfeecbf071" (UID: "bba4322e-397d-4b6a-b52c-14dfeecbf071"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.931111 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bba4322e-397d-4b6a-b52c-14dfeecbf071-kube-api-access-qbtjp" (OuterVolumeSpecName: "kube-api-access-qbtjp") pod "bba4322e-397d-4b6a-b52c-14dfeecbf071" (UID: "bba4322e-397d-4b6a-b52c-14dfeecbf071"). InnerVolumeSpecName "kube-api-access-qbtjp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.931160 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/715f891b-4e52-454b-b5c0-22694ef088e8-kube-api-access-xxgtv" (OuterVolumeSpecName: "kube-api-access-xxgtv") pod "715f891b-4e52-454b-b5c0-22694ef088e8" (UID: "715f891b-4e52-454b-b5c0-22694ef088e8"). InnerVolumeSpecName "kube-api-access-xxgtv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.931560 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77b66620-e883-40d8-8294-b6b4a2f3ad8c-kube-api-access-dhrp7" (OuterVolumeSpecName: "kube-api-access-dhrp7") pod "77b66620-e883-40d8-8294-b6b4a2f3ad8c" (UID: "77b66620-e883-40d8-8294-b6b4a2f3ad8c"). InnerVolumeSpecName "kube-api-access-dhrp7". 
PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.937812 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hrlnq"]
Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.940967 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-hrlnq"]
Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.968796 4792 scope.go:117] "RemoveContainer" containerID="b9622c35979e0f7ebe46d73aaffb0f0f6b6d2d3a4d78258aea5d0555145c6d5f"
Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.990582 4792 scope.go:117] "RemoveContainer" containerID="367ef52fc282d50cafbe486c37d9a0fce61c808e0cf6f2a19a5e5177f6f220f4"
Dec 02 18:42:36 crc kubenswrapper[4792]: E1202 18:42:36.991046 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"367ef52fc282d50cafbe486c37d9a0fce61c808e0cf6f2a19a5e5177f6f220f4\": container with ID starting with 367ef52fc282d50cafbe486c37d9a0fce61c808e0cf6f2a19a5e5177f6f220f4 not found: ID does not exist" containerID="367ef52fc282d50cafbe486c37d9a0fce61c808e0cf6f2a19a5e5177f6f220f4"
Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.991102 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"367ef52fc282d50cafbe486c37d9a0fce61c808e0cf6f2a19a5e5177f6f220f4"} err="failed to get container status \"367ef52fc282d50cafbe486c37d9a0fce61c808e0cf6f2a19a5e5177f6f220f4\": rpc error: code = NotFound desc = could not find container \"367ef52fc282d50cafbe486c37d9a0fce61c808e0cf6f2a19a5e5177f6f220f4\": container with ID starting with 367ef52fc282d50cafbe486c37d9a0fce61c808e0cf6f2a19a5e5177f6f220f4 not found: ID does not exist"
Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.991139 4792 scope.go:117] "RemoveContainer" containerID="5466f2cd997f197a84e922fad7e87e31254cd10ed751caec26f53eb8e05496a7"
Dec 02 18:42:36 crc kubenswrapper[4792]: E1202 18:42:36.991891 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5466f2cd997f197a84e922fad7e87e31254cd10ed751caec26f53eb8e05496a7\": container with ID starting with 5466f2cd997f197a84e922fad7e87e31254cd10ed751caec26f53eb8e05496a7 not found: ID does not exist" containerID="5466f2cd997f197a84e922fad7e87e31254cd10ed751caec26f53eb8e05496a7"
Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.991934 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5466f2cd997f197a84e922fad7e87e31254cd10ed751caec26f53eb8e05496a7"} err="failed to get container status \"5466f2cd997f197a84e922fad7e87e31254cd10ed751caec26f53eb8e05496a7\": rpc error: code = NotFound desc = could not find container \"5466f2cd997f197a84e922fad7e87e31254cd10ed751caec26f53eb8e05496a7\": container with ID starting with 5466f2cd997f197a84e922fad7e87e31254cd10ed751caec26f53eb8e05496a7 not found: ID does not exist"
Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.991969 4792 scope.go:117] "RemoveContainer" containerID="b9622c35979e0f7ebe46d73aaffb0f0f6b6d2d3a4d78258aea5d0555145c6d5f"
Dec 02 18:42:36 crc kubenswrapper[4792]: E1202 18:42:36.992379 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b9622c35979e0f7ebe46d73aaffb0f0f6b6d2d3a4d78258aea5d0555145c6d5f\": container with ID starting with b9622c35979e0f7ebe46d73aaffb0f0f6b6d2d3a4d78258aea5d0555145c6d5f not found: ID does not exist" containerID="b9622c35979e0f7ebe46d73aaffb0f0f6b6d2d3a4d78258aea5d0555145c6d5f"
Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.992407 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9622c35979e0f7ebe46d73aaffb0f0f6b6d2d3a4d78258aea5d0555145c6d5f"} err="failed to get container status \"b9622c35979e0f7ebe46d73aaffb0f0f6b6d2d3a4d78258aea5d0555145c6d5f\": rpc error: code = NotFound desc = could not find container \"b9622c35979e0f7ebe46d73aaffb0f0f6b6d2d3a4d78258aea5d0555145c6d5f\": container with ID starting with b9622c35979e0f7ebe46d73aaffb0f0f6b6d2d3a4d78258aea5d0555145c6d5f not found: ID does not exist"
Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.992422 4792 scope.go:117] "RemoveContainer" containerID="505ec2e14d0f3560ae3a57d9aa8f932f8fa18afcabcbe48bdb2d3ecdc72bee2e"
Dec 02 18:42:36 crc kubenswrapper[4792]: I1202 18:42:36.996352 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/77b66620-e883-40d8-8294-b6b4a2f3ad8c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "77b66620-e883-40d8-8294-b6b4a2f3ad8c" (UID: "77b66620-e883-40d8-8294-b6b4a2f3ad8c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.005700 4792 scope.go:117] "RemoveContainer" containerID="a6503cd0f8efe51e13a195bdcbe46dbce7ea207228414e797e3fd9c653b8b724"
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.019409 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bba4322e-397d-4b6a-b52c-14dfeecbf071-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bba4322e-397d-4b6a-b52c-14dfeecbf071" (UID: "bba4322e-397d-4b6a-b52c-14dfeecbf071"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.021808 4792 scope.go:117] "RemoveContainer" containerID="98f108e6b246309ffacd7f673483a7af7e07f7b377c87a1ff537e1a180e69fde"
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.024283 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dhrp7\" (UniqueName: \"kubernetes.io/projected/77b66620-e883-40d8-8294-b6b4a2f3ad8c-kube-api-access-dhrp7\") on node \"crc\" DevicePath \"\""
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.024324 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/715f891b-4e52-454b-b5c0-22694ef088e8-utilities\") on node \"crc\" DevicePath \"\""
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.024342 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bba4322e-397d-4b6a-b52c-14dfeecbf071-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.024354 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xxgtv\" (UniqueName: \"kubernetes.io/projected/715f891b-4e52-454b-b5c0-22694ef088e8-kube-api-access-xxgtv\") on node \"crc\" DevicePath \"\""
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.024364 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qbtjp\" (UniqueName: \"kubernetes.io/projected/bba4322e-397d-4b6a-b52c-14dfeecbf071-kube-api-access-qbtjp\") on node \"crc\" DevicePath \"\""
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.024375 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77b66620-e883-40d8-8294-b6b4a2f3ad8c-utilities\") on node \"crc\" DevicePath \"\""
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.024384 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bba4322e-397d-4b6a-b52c-14dfeecbf071-utilities\") on node \"crc\" DevicePath \"\""
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.024394 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77b66620-e883-40d8-8294-b6b4a2f3ad8c-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.035719 4792 scope.go:117] "RemoveContainer" containerID="505ec2e14d0f3560ae3a57d9aa8f932f8fa18afcabcbe48bdb2d3ecdc72bee2e"
Dec 02 18:42:37 crc kubenswrapper[4792]: E1202 18:42:37.036138 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"505ec2e14d0f3560ae3a57d9aa8f932f8fa18afcabcbe48bdb2d3ecdc72bee2e\": container with ID starting with 505ec2e14d0f3560ae3a57d9aa8f932f8fa18afcabcbe48bdb2d3ecdc72bee2e not found: ID does not exist" containerID="505ec2e14d0f3560ae3a57d9aa8f932f8fa18afcabcbe48bdb2d3ecdc72bee2e"
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.036179 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"505ec2e14d0f3560ae3a57d9aa8f932f8fa18afcabcbe48bdb2d3ecdc72bee2e"} err="failed to get container status \"505ec2e14d0f3560ae3a57d9aa8f932f8fa18afcabcbe48bdb2d3ecdc72bee2e\": rpc error: code = NotFound desc = could not find container \"505ec2e14d0f3560ae3a57d9aa8f932f8fa18afcabcbe48bdb2d3ecdc72bee2e\": container with ID starting with 505ec2e14d0f3560ae3a57d9aa8f932f8fa18afcabcbe48bdb2d3ecdc72bee2e not found: ID does not exist"
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.036214 4792 scope.go:117] "RemoveContainer" containerID="a6503cd0f8efe51e13a195bdcbe46dbce7ea207228414e797e3fd9c653b8b724"
Dec 02 18:42:37 crc kubenswrapper[4792]: E1202 18:42:37.036572 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a6503cd0f8efe51e13a195bdcbe46dbce7ea207228414e797e3fd9c653b8b724\": container with ID starting with a6503cd0f8efe51e13a195bdcbe46dbce7ea207228414e797e3fd9c653b8b724 not found: ID does not exist" containerID="a6503cd0f8efe51e13a195bdcbe46dbce7ea207228414e797e3fd9c653b8b724"
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.036612 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6503cd0f8efe51e13a195bdcbe46dbce7ea207228414e797e3fd9c653b8b724"} err="failed to get container status \"a6503cd0f8efe51e13a195bdcbe46dbce7ea207228414e797e3fd9c653b8b724\": rpc error: code = NotFound desc = could not find container \"a6503cd0f8efe51e13a195bdcbe46dbce7ea207228414e797e3fd9c653b8b724\": container with ID starting with a6503cd0f8efe51e13a195bdcbe46dbce7ea207228414e797e3fd9c653b8b724 not found: ID does not exist"
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.036665 4792 scope.go:117] "RemoveContainer" containerID="98f108e6b246309ffacd7f673483a7af7e07f7b377c87a1ff537e1a180e69fde"
Dec 02 18:42:37 crc kubenswrapper[4792]: E1202 18:42:37.036948 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"98f108e6b246309ffacd7f673483a7af7e07f7b377c87a1ff537e1a180e69fde\": container with ID starting with 98f108e6b246309ffacd7f673483a7af7e07f7b377c87a1ff537e1a180e69fde not found: ID does not exist" containerID="98f108e6b246309ffacd7f673483a7af7e07f7b377c87a1ff537e1a180e69fde"
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.036977 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"98f108e6b246309ffacd7f673483a7af7e07f7b377c87a1ff537e1a180e69fde"} err="failed to get container status \"98f108e6b246309ffacd7f673483a7af7e07f7b377c87a1ff537e1a180e69fde\": rpc error: code = NotFound desc = could not find container \"98f108e6b246309ffacd7f673483a7af7e07f7b377c87a1ff537e1a180e69fde\": container with ID starting with 98f108e6b246309ffacd7f673483a7af7e07f7b377c87a1ff537e1a180e69fde not found: ID does not exist"
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.036994 4792 scope.go:117] "RemoveContainer" containerID="630af089f94974c9797a52909316c899823f65a1a43d044168624e35d7723606"
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.058295 4792 scope.go:117] "RemoveContainer" containerID="de3a9b7814ee7602d3da77cbacf2596599d30f687919faa0c50b68f27cd72daa"
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.060430 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/715f891b-4e52-454b-b5c0-22694ef088e8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "715f891b-4e52-454b-b5c0-22694ef088e8" (UID: "715f891b-4e52-454b-b5c0-22694ef088e8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.075193 4792 scope.go:117] "RemoveContainer" containerID="5f4b0a470cf4935b43f722ad2db1816e07c10f35b5f2df4afb97aa24d320d6fc"
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.095942 4792 scope.go:117] "RemoveContainer" containerID="630af089f94974c9797a52909316c899823f65a1a43d044168624e35d7723606"
Dec 02 18:42:37 crc kubenswrapper[4792]: E1202 18:42:37.097023 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"630af089f94974c9797a52909316c899823f65a1a43d044168624e35d7723606\": container with ID starting with 630af089f94974c9797a52909316c899823f65a1a43d044168624e35d7723606 not found: ID does not exist" containerID="630af089f94974c9797a52909316c899823f65a1a43d044168624e35d7723606"
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.097060 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"630af089f94974c9797a52909316c899823f65a1a43d044168624e35d7723606"} err="failed to get container status \"630af089f94974c9797a52909316c899823f65a1a43d044168624e35d7723606\": rpc error: code = NotFound desc = could not find container \"630af089f94974c9797a52909316c899823f65a1a43d044168624e35d7723606\": container with ID starting with 630af089f94974c9797a52909316c899823f65a1a43d044168624e35d7723606 not found: ID does not exist"
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.097107 4792 scope.go:117] "RemoveContainer" containerID="de3a9b7814ee7602d3da77cbacf2596599d30f687919faa0c50b68f27cd72daa"
Dec 02 18:42:37 crc kubenswrapper[4792]: E1202 18:42:37.097919 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de3a9b7814ee7602d3da77cbacf2596599d30f687919faa0c50b68f27cd72daa\": container with ID starting with de3a9b7814ee7602d3da77cbacf2596599d30f687919faa0c50b68f27cd72daa not found: ID does not exist" containerID="de3a9b7814ee7602d3da77cbacf2596599d30f687919faa0c50b68f27cd72daa"
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.097951 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de3a9b7814ee7602d3da77cbacf2596599d30f687919faa0c50b68f27cd72daa"} err="failed to get container status \"de3a9b7814ee7602d3da77cbacf2596599d30f687919faa0c50b68f27cd72daa\": rpc error: code = NotFound desc = could not find container \"de3a9b7814ee7602d3da77cbacf2596599d30f687919faa0c50b68f27cd72daa\": container with ID starting with de3a9b7814ee7602d3da77cbacf2596599d30f687919faa0c50b68f27cd72daa not found: ID does not exist"
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.097967 4792 scope.go:117] "RemoveContainer" containerID="5f4b0a470cf4935b43f722ad2db1816e07c10f35b5f2df4afb97aa24d320d6fc"
Dec 02 18:42:37 crc kubenswrapper[4792]: E1202 18:42:37.098591 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5f4b0a470cf4935b43f722ad2db1816e07c10f35b5f2df4afb97aa24d320d6fc\": container with ID starting with 5f4b0a470cf4935b43f722ad2db1816e07c10f35b5f2df4afb97aa24d320d6fc not found: ID does not exist" containerID="5f4b0a470cf4935b43f722ad2db1816e07c10f35b5f2df4afb97aa24d320d6fc"
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.098625 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f4b0a470cf4935b43f722ad2db1816e07c10f35b5f2df4afb97aa24d320d6fc"} err="failed to get container status \"5f4b0a470cf4935b43f722ad2db1816e07c10f35b5f2df4afb97aa24d320d6fc\": rpc error: code = NotFound desc = could not find container \"5f4b0a470cf4935b43f722ad2db1816e07c10f35b5f2df4afb97aa24d320d6fc\": container with ID starting with 5f4b0a470cf4935b43f722ad2db1816e07c10f35b5f2df4afb97aa24d320d6fc not found: ID does not exist"
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.098644 4792 scope.go:117] "RemoveContainer" containerID="a145eade2c7870a50a2ded2d47f9e134150e97e6cd1af473f97c07ccfeb00b9d"
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.099192 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-nzgzn"
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.111602 4792 scope.go:117] "RemoveContainer" containerID="8bebd34a8e4011cb5b5ac971551e3e2aa6a28e093921fc7118dbbd252530852f"
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.126499 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/715f891b-4e52-454b-b5c0-22694ef088e8-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.138418 4792 scope.go:117] "RemoveContainer" containerID="d9185788232b909d86a3d99049f921a1ff08625c9eaac830acda146ada5a3338"
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.155757 4792 scope.go:117] "RemoveContainer" containerID="7cb8a1d079a33fca15ea39298554e95af0d3b390f6b94e29b1ac06938d337336"
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.168844 4792 scope.go:117] "RemoveContainer" containerID="8bebd34a8e4011cb5b5ac971551e3e2aa6a28e093921fc7118dbbd252530852f"
Dec 02 18:42:37 crc kubenswrapper[4792]: E1202 18:42:37.169270 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8bebd34a8e4011cb5b5ac971551e3e2aa6a28e093921fc7118dbbd252530852f\": container with ID starting with 8bebd34a8e4011cb5b5ac971551e3e2aa6a28e093921fc7118dbbd252530852f not found: ID does not exist" containerID="8bebd34a8e4011cb5b5ac971551e3e2aa6a28e093921fc7118dbbd252530852f"
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.169323 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8bebd34a8e4011cb5b5ac971551e3e2aa6a28e093921fc7118dbbd252530852f"} err="failed to get container status \"8bebd34a8e4011cb5b5ac971551e3e2aa6a28e093921fc7118dbbd252530852f\": rpc error: code = NotFound desc = could not find container \"8bebd34a8e4011cb5b5ac971551e3e2aa6a28e093921fc7118dbbd252530852f\": container with ID starting with 8bebd34a8e4011cb5b5ac971551e3e2aa6a28e093921fc7118dbbd252530852f not found: ID does not exist"
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.169362 4792 scope.go:117] "RemoveContainer" containerID="d9185788232b909d86a3d99049f921a1ff08625c9eaac830acda146ada5a3338"
Dec 02 18:42:37 crc kubenswrapper[4792]: E1202 18:42:37.169659 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d9185788232b909d86a3d99049f921a1ff08625c9eaac830acda146ada5a3338\": container with ID starting with d9185788232b909d86a3d99049f921a1ff08625c9eaac830acda146ada5a3338 not found: ID does not exist" containerID="d9185788232b909d86a3d99049f921a1ff08625c9eaac830acda146ada5a3338"
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.169697 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d9185788232b909d86a3d99049f921a1ff08625c9eaac830acda146ada5a3338"} err="failed to get container status \"d9185788232b909d86a3d99049f921a1ff08625c9eaac830acda146ada5a3338\": rpc error: code = NotFound desc = could not find container \"d9185788232b909d86a3d99049f921a1ff08625c9eaac830acda146ada5a3338\": container with ID starting with d9185788232b909d86a3d99049f921a1ff08625c9eaac830acda146ada5a3338 not found: ID does not exist"
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.169723 4792 scope.go:117] "RemoveContainer" containerID="7cb8a1d079a33fca15ea39298554e95af0d3b390f6b94e29b1ac06938d337336"
Dec 02 18:42:37 crc kubenswrapper[4792]: E1202 18:42:37.170068 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7cb8a1d079a33fca15ea39298554e95af0d3b390f6b94e29b1ac06938d337336\": container with ID starting with 7cb8a1d079a33fca15ea39298554e95af0d3b390f6b94e29b1ac06938d337336 not found: ID does not exist" containerID="7cb8a1d079a33fca15ea39298554e95af0d3b390f6b94e29b1ac06938d337336"
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.170110 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7cb8a1d079a33fca15ea39298554e95af0d3b390f6b94e29b1ac06938d337336"} err="failed to get container status \"7cb8a1d079a33fca15ea39298554e95af0d3b390f6b94e29b1ac06938d337336\": rpc error: code = NotFound desc = could not find container \"7cb8a1d079a33fca15ea39298554e95af0d3b390f6b94e29b1ac06938d337336\": container with ID starting with 7cb8a1d079a33fca15ea39298554e95af0d3b390f6b94e29b1ac06938d337336 not found: ID does not exist"
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.214981 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-c85tx"]
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.221796 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-c85tx"]
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.227591 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ab249412-8c3a-4e5f-b84a-374b19cc1dc9-marketplace-trusted-ca\") pod \"ab249412-8c3a-4e5f-b84a-374b19cc1dc9\" (UID: \"ab249412-8c3a-4e5f-b84a-374b19cc1dc9\") "
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.227685 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tv9ls\" (UniqueName: \"kubernetes.io/projected/ab249412-8c3a-4e5f-b84a-374b19cc1dc9-kube-api-access-tv9ls\") pod \"ab249412-8c3a-4e5f-b84a-374b19cc1dc9\" (UID: \"ab249412-8c3a-4e5f-b84a-374b19cc1dc9\") "
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.227820 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/ab249412-8c3a-4e5f-b84a-374b19cc1dc9-marketplace-operator-metrics\") pod \"ab249412-8c3a-4e5f-b84a-374b19cc1dc9\" (UID: \"ab249412-8c3a-4e5f-b84a-374b19cc1dc9\") "
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.228974 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab249412-8c3a-4e5f-b84a-374b19cc1dc9-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "ab249412-8c3a-4e5f-b84a-374b19cc1dc9" (UID: "ab249412-8c3a-4e5f-b84a-374b19cc1dc9"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.243244 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab249412-8c3a-4e5f-b84a-374b19cc1dc9-kube-api-access-tv9ls" (OuterVolumeSpecName: "kube-api-access-tv9ls") pod "ab249412-8c3a-4e5f-b84a-374b19cc1dc9" (UID: "ab249412-8c3a-4e5f-b84a-374b19cc1dc9"). InnerVolumeSpecName "kube-api-access-tv9ls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.243312 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab249412-8c3a-4e5f-b84a-374b19cc1dc9-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "ab249412-8c3a-4e5f-b84a-374b19cc1dc9" (UID: "ab249412-8c3a-4e5f-b84a-374b19cc1dc9"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.252491 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-p6vhp"]
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.261712 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-p6vhp"]
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.267125 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-7r9rs"]
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.304118 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-z4jbn"]
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.308602 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-z4jbn"]
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.329057 4792 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ab249412-8c3a-4e5f-b84a-374b19cc1dc9-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\""
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.329088 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tv9ls\" (UniqueName: \"kubernetes.io/projected/ab249412-8c3a-4e5f-b84a-374b19cc1dc9-kube-api-access-tv9ls\") on node \"crc\" DevicePath \"\""
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.329099 4792 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/ab249412-8c3a-4e5f-b84a-374b19cc1dc9-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\""
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.558932 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="715f891b-4e52-454b-b5c0-22694ef088e8" path="/var/lib/kubelet/pods/715f891b-4e52-454b-b5c0-22694ef088e8/volumes"
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.560188 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="77b66620-e883-40d8-8294-b6b4a2f3ad8c" path="/var/lib/kubelet/pods/77b66620-e883-40d8-8294-b6b4a2f3ad8c/volumes"
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.561437 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bba4322e-397d-4b6a-b52c-14dfeecbf071" path="/var/lib/kubelet/pods/bba4322e-397d-4b6a-b52c-14dfeecbf071/volumes"
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.563567 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c118dcaa-7c35-4a91-9b16-b3796e95fa86" path="/var/lib/kubelet/pods/c118dcaa-7c35-4a91-9b16-b3796e95fa86/volumes"
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.921191 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-7r9rs" event={"ID":"1ed6fdc1-3384-4b3a-92a3-fe3c5a50c21e","Type":"ContainerStarted","Data":"8a14a79230136328491b35be1c08d4a4bdaea0bd8d000c4ad5cbdaab1fa171e4"}
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.921274 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-7r9rs" event={"ID":"1ed6fdc1-3384-4b3a-92a3-fe3c5a50c21e","Type":"ContainerStarted","Data":"5321a3cb77cd9ccaf7be92a6a94f17782c772c6af9500c49d8224b304c65a41f"}
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.921846 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-7r9rs"
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.925748 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-7r9rs"
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.928197 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-nzgzn" event={"ID":"ab249412-8c3a-4e5f-b84a-374b19cc1dc9","Type":"ContainerDied","Data":"4329d7a93f95071e1c5fa95afbc2403ca98bf4c07209be09f92ac45c60990060"}
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.928229 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-nzgzn"
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.928290 4792 scope.go:117] "RemoveContainer" containerID="824c076dc9f4885cd57f68c223cc4a78d0e3dac504b4a0be7cb0952f18f3ef45"
Dec 02 18:42:37 crc kubenswrapper[4792]: I1202 18:42:37.946440 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-7r9rs" podStartSLOduration=1.946428308 podStartE2EDuration="1.946428308s" podCreationTimestamp="2025-12-02 18:42:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:42:37.943284566 +0000 UTC m=+388.716176944" watchObservedRunningTime="2025-12-02 18:42:37.946428308 +0000 UTC m=+388.719320636"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.012229 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-nzgzn"]
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.019209 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-nzgzn"]
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.081223 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.083674 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.261754 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-gtsnz"]
Dec 02 18:42:38 crc kubenswrapper[4792]: E1202 18:42:38.262035 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c118dcaa-7c35-4a91-9b16-b3796e95fa86" containerName="registry-server"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.262053 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="c118dcaa-7c35-4a91-9b16-b3796e95fa86" containerName="registry-server"
Dec 02 18:42:38 crc kubenswrapper[4792]: E1202 18:42:38.262065 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77b66620-e883-40d8-8294-b6b4a2f3ad8c" containerName="extract-content"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.262073 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="77b66620-e883-40d8-8294-b6b4a2f3ad8c" containerName="extract-content"
Dec 02 18:42:38 crc kubenswrapper[4792]: E1202 18:42:38.262085 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77b66620-e883-40d8-8294-b6b4a2f3ad8c" containerName="registry-server"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.262092 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="77b66620-e883-40d8-8294-b6b4a2f3ad8c" containerName="registry-server"
Dec 02 18:42:38 crc kubenswrapper[4792]: E1202 18:42:38.262103 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="715f891b-4e52-454b-b5c0-22694ef088e8" containerName="registry-server"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.262110 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="715f891b-4e52-454b-b5c0-22694ef088e8" containerName="registry-server"
Dec 02 18:42:38 crc kubenswrapper[4792]: E1202 18:42:38.262120 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c118dcaa-7c35-4a91-9b16-b3796e95fa86" containerName="extract-content"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.262130 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="c118dcaa-7c35-4a91-9b16-b3796e95fa86" containerName="extract-content"
Dec 02 18:42:38 crc kubenswrapper[4792]: E1202 18:42:38.262143 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab249412-8c3a-4e5f-b84a-374b19cc1dc9" containerName="marketplace-operator"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.262154 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab249412-8c3a-4e5f-b84a-374b19cc1dc9" containerName="marketplace-operator"
Dec 02 18:42:38 crc kubenswrapper[4792]: E1202 18:42:38.262165 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="715f891b-4e52-454b-b5c0-22694ef088e8" containerName="extract-content"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.262172 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="715f891b-4e52-454b-b5c0-22694ef088e8" containerName="extract-content"
Dec 02 18:42:38 crc kubenswrapper[4792]: E1202 18:42:38.262209 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab249412-8c3a-4e5f-b84a-374b19cc1dc9" containerName="marketplace-operator"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.262217 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab249412-8c3a-4e5f-b84a-374b19cc1dc9" containerName="marketplace-operator"
Dec 02 18:42:38 crc kubenswrapper[4792]: E1202 18:42:38.262229 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bba4322e-397d-4b6a-b52c-14dfeecbf071" containerName="registry-server"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.262237 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="bba4322e-397d-4b6a-b52c-14dfeecbf071" containerName="registry-server"
Dec 02 18:42:38 crc kubenswrapper[4792]: E1202 18:42:38.262247 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bba4322e-397d-4b6a-b52c-14dfeecbf071" containerName="extract-utilities"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.262254 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="bba4322e-397d-4b6a-b52c-14dfeecbf071" containerName="extract-utilities"
Dec 02 18:42:38 crc kubenswrapper[4792]: E1202 18:42:38.262262 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bba4322e-397d-4b6a-b52c-14dfeecbf071" containerName="extract-content"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.262269 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="bba4322e-397d-4b6a-b52c-14dfeecbf071" containerName="extract-content"
Dec 02 18:42:38 crc kubenswrapper[4792]: E1202 18:42:38.262278 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="715f891b-4e52-454b-b5c0-22694ef088e8" containerName="extract-utilities"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.262286 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="715f891b-4e52-454b-b5c0-22694ef088e8" containerName="extract-utilities"
Dec 02 18:42:38 crc kubenswrapper[4792]: E1202 18:42:38.262297 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77b66620-e883-40d8-8294-b6b4a2f3ad8c" containerName="extract-utilities"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.262305 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="77b66620-e883-40d8-8294-b6b4a2f3ad8c" containerName="extract-utilities"
Dec 02 18:42:38 crc kubenswrapper[4792]: E1202 18:42:38.262313 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c118dcaa-7c35-4a91-9b16-b3796e95fa86" containerName="extract-utilities"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.262321 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="c118dcaa-7c35-4a91-9b16-b3796e95fa86" containerName="extract-utilities"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.262442 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="bba4322e-397d-4b6a-b52c-14dfeecbf071" containerName="registry-server"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.262457 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="77b66620-e883-40d8-8294-b6b4a2f3ad8c" containerName="registry-server"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.262468 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab249412-8c3a-4e5f-b84a-374b19cc1dc9" containerName="marketplace-operator"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.262478 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab249412-8c3a-4e5f-b84a-374b19cc1dc9" containerName="marketplace-operator"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.262490 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="c118dcaa-7c35-4a91-9b16-b3796e95fa86" containerName="registry-server"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.262502 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="715f891b-4e52-454b-b5c0-22694ef088e8" containerName="registry-server"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.263996 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gtsnz"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.266552 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.277581 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gtsnz"]
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.464830 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-2wlh2"]
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.466387 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2wlh2"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.471336 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.475267 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2wlh2"]
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.492132 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wvwmw\" (UniqueName: \"kubernetes.io/projected/2f57faa3-e0a7-4d6a-b404-0e0d2fb73461-kube-api-access-wvwmw\") pod \"redhat-marketplace-gtsnz\" (UID: \"2f57faa3-e0a7-4d6a-b404-0e0d2fb73461\") " pod="openshift-marketplace/redhat-marketplace-gtsnz"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.492362 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f57faa3-e0a7-4d6a-b404-0e0d2fb73461-utilities\") pod \"redhat-marketplace-gtsnz\" (UID: \"2f57faa3-e0a7-4d6a-b404-0e0d2fb73461\") " pod="openshift-marketplace/redhat-marketplace-gtsnz"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.492494 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f57faa3-e0a7-4d6a-b404-0e0d2fb73461-catalog-content\") pod \"redhat-marketplace-gtsnz\" (UID: \"2f57faa3-e0a7-4d6a-b404-0e0d2fb73461\") " pod="openshift-marketplace/redhat-marketplace-gtsnz"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.593642 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f57faa3-e0a7-4d6a-b404-0e0d2fb73461-utilities\") pod \"redhat-marketplace-gtsnz\" (UID: \"2f57faa3-e0a7-4d6a-b404-0e0d2fb73461\") " pod="openshift-marketplace/redhat-marketplace-gtsnz"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.594138 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e56bb4a5-5196-4a99-8d4d-2c8449675e62-catalog-content\") pod \"redhat-operators-2wlh2\" (UID: \"e56bb4a5-5196-4a99-8d4d-2c8449675e62\") " pod="openshift-marketplace/redhat-operators-2wlh2"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.594230 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f57faa3-e0a7-4d6a-b404-0e0d2fb73461-catalog-content\") pod \"redhat-marketplace-gtsnz\" (UID: \"2f57faa3-e0a7-4d6a-b404-0e0d2fb73461\") " pod="openshift-marketplace/redhat-marketplace-gtsnz"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.594336 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e56bb4a5-5196-4a99-8d4d-2c8449675e62-utilities\") pod \"redhat-operators-2wlh2\" (UID: \"e56bb4a5-5196-4a99-8d4d-2c8449675e62\") " pod="openshift-marketplace/redhat-operators-2wlh2"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.594418 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ndh2\" (UniqueName: \"kubernetes.io/projected/e56bb4a5-5196-4a99-8d4d-2c8449675e62-kube-api-access-9ndh2\") pod \"redhat-operators-2wlh2\" (UID: \"e56bb4a5-5196-4a99-8d4d-2c8449675e62\") " pod="openshift-marketplace/redhat-operators-2wlh2"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.594504 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wvwmw\" (UniqueName: \"kubernetes.io/projected/2f57faa3-e0a7-4d6a-b404-0e0d2fb73461-kube-api-access-wvwmw\") pod \"redhat-marketplace-gtsnz\" (UID: \"2f57faa3-e0a7-4d6a-b404-0e0d2fb73461\") " pod="openshift-marketplace/redhat-marketplace-gtsnz"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.595443 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f57faa3-e0a7-4d6a-b404-0e0d2fb73461-utilities\") pod \"redhat-marketplace-gtsnz\" (UID: \"2f57faa3-e0a7-4d6a-b404-0e0d2fb73461\") " pod="openshift-marketplace/redhat-marketplace-gtsnz"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.595763 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f57faa3-e0a7-4d6a-b404-0e0d2fb73461-catalog-content\") pod \"redhat-marketplace-gtsnz\" (UID: \"2f57faa3-e0a7-4d6a-b404-0e0d2fb73461\") " pod="openshift-marketplace/redhat-marketplace-gtsnz"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.618039 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wvwmw\" (UniqueName: \"kubernetes.io/projected/2f57faa3-e0a7-4d6a-b404-0e0d2fb73461-kube-api-access-wvwmw\") pod \"redhat-marketplace-gtsnz\" (UID: \"2f57faa3-e0a7-4d6a-b404-0e0d2fb73461\") " pod="openshift-marketplace/redhat-marketplace-gtsnz"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.695481 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ndh2\" (UniqueName: \"kubernetes.io/projected/e56bb4a5-5196-4a99-8d4d-2c8449675e62-kube-api-access-9ndh2\") pod \"redhat-operators-2wlh2\" (UID: \"e56bb4a5-5196-4a99-8d4d-2c8449675e62\") " pod="openshift-marketplace/redhat-operators-2wlh2"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.695591 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e56bb4a5-5196-4a99-8d4d-2c8449675e62-catalog-content\") pod \"redhat-operators-2wlh2\" (UID: \"e56bb4a5-5196-4a99-8d4d-2c8449675e62\") " pod="openshift-marketplace/redhat-operators-2wlh2"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.695652 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e56bb4a5-5196-4a99-8d4d-2c8449675e62-utilities\") pod \"redhat-operators-2wlh2\" (UID: \"e56bb4a5-5196-4a99-8d4d-2c8449675e62\") " pod="openshift-marketplace/redhat-operators-2wlh2"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.696306 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e56bb4a5-5196-4a99-8d4d-2c8449675e62-utilities\") pod \"redhat-operators-2wlh2\" (UID: \"e56bb4a5-5196-4a99-8d4d-2c8449675e62\") " pod="openshift-marketplace/redhat-operators-2wlh2"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.696655 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e56bb4a5-5196-4a99-8d4d-2c8449675e62-catalog-content\") pod \"redhat-operators-2wlh2\" (UID: \"e56bb4a5-5196-4a99-8d4d-2c8449675e62\") " pod="openshift-marketplace/redhat-operators-2wlh2"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.706358 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gtsnz"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.714231 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9ndh2\" (UniqueName: \"kubernetes.io/projected/e56bb4a5-5196-4a99-8d4d-2c8449675e62-kube-api-access-9ndh2\") pod \"redhat-operators-2wlh2\" (UID: \"e56bb4a5-5196-4a99-8d4d-2c8449675e62\") " pod="openshift-marketplace/redhat-operators-2wlh2"
Dec 02 18:42:38 crc kubenswrapper[4792]: I1202 18:42:38.783925 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2wlh2"
Dec 02 18:42:39 crc kubenswrapper[4792]: I1202 18:42:39.145702 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gtsnz"]
Dec 02 18:42:39 crc kubenswrapper[4792]: W1202 18:42:39.147912 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2f57faa3_e0a7_4d6a_b404_0e0d2fb73461.slice/crio-d36e0b6b12697db1ad4445c8902e87426369f0ab0881d6fb58c9f050fb3729da WatchSource:0}: Error finding container d36e0b6b12697db1ad4445c8902e87426369f0ab0881d6fb58c9f050fb3729da: Status 404 returned error can't find the container with id d36e0b6b12697db1ad4445c8902e87426369f0ab0881d6fb58c9f050fb3729da
Dec 02 18:42:39 crc kubenswrapper[4792]: I1202 18:42:39.249119 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2wlh2"]
Dec 02 18:42:39 crc kubenswrapper[4792]: W1202 18:42:39.258371 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode56bb4a5_5196_4a99_8d4d_2c8449675e62.slice/crio-31ab5827fe4c92cb13607b8df7ac1c4d2664073de1cabae9d779b38abae83d96 WatchSource:0}: Error finding container 31ab5827fe4c92cb13607b8df7ac1c4d2664073de1cabae9d779b38abae83d96: Status 404 returned error can't find the container with id 31ab5827fe4c92cb13607b8df7ac1c4d2664073de1cabae9d779b38abae83d96
Dec 02 18:42:39 crc kubenswrapper[4792]: I1202 18:42:39.552757 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab249412-8c3a-4e5f-b84a-374b19cc1dc9" path="/var/lib/kubelet/pods/ab249412-8c3a-4e5f-b84a-374b19cc1dc9/volumes"
Dec 02 18:42:39 crc kubenswrapper[4792]: I1202 18:42:39.948005 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2wlh2" event={"ID":"e56bb4a5-5196-4a99-8d4d-2c8449675e62","Type":"ContainerStarted","Data":"31ab5827fe4c92cb13607b8df7ac1c4d2664073de1cabae9d779b38abae83d96"}
Dec 02 18:42:39 crc kubenswrapper[4792]: I1202 18:42:39.950337 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gtsnz" event={"ID":"2f57faa3-e0a7-4d6a-b404-0e0d2fb73461","Type":"ContainerStarted","Data":"d36e0b6b12697db1ad4445c8902e87426369f0ab0881d6fb58c9f050fb3729da"}
Dec 02 18:42:40 crc kubenswrapper[4792]: I1202 18:42:40.677140 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-ch67n"]
Dec 02 18:42:40 crc kubenswrapper[4792]: I1202 18:42:40.678908 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ch67n"
Dec 02 18:42:40 crc kubenswrapper[4792]: I1202 18:42:40.686805 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Dec 02 18:42:40 crc kubenswrapper[4792]: I1202 18:42:40.692852 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ch67n"]
Dec 02 18:42:40 crc kubenswrapper[4792]: I1202 18:42:40.830150 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ace09f1b-8dfe-48ae-93b6-b6a6f32beb8b-catalog-content\") pod \"community-operators-ch67n\" (UID: \"ace09f1b-8dfe-48ae-93b6-b6a6f32beb8b\") " pod="openshift-marketplace/community-operators-ch67n"
Dec 02 18:42:40 crc kubenswrapper[4792]: I1202 18:42:40.830238 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fkfnv\" (UniqueName: \"kubernetes.io/projected/ace09f1b-8dfe-48ae-93b6-b6a6f32beb8b-kube-api-access-fkfnv\") pod \"community-operators-ch67n\" (UID: \"ace09f1b-8dfe-48ae-93b6-b6a6f32beb8b\") " pod="openshift-marketplace/community-operators-ch67n"
Dec 02 18:42:40 crc kubenswrapper[4792]: I1202 18:42:40.830284 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ace09f1b-8dfe-48ae-93b6-b6a6f32beb8b-utilities\") pod \"community-operators-ch67n\" (UID: \"ace09f1b-8dfe-48ae-93b6-b6a6f32beb8b\") " pod="openshift-marketplace/community-operators-ch67n"
Dec 02 18:42:40 crc kubenswrapper[4792]: I1202 18:42:40.878343 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-p7bwt"]
Dec 02 18:42:40 crc kubenswrapper[4792]: I1202 18:42:40.884950 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-p7bwt"
Dec 02 18:42:40 crc kubenswrapper[4792]: I1202 18:42:40.888346 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-p7bwt"]
Dec 02 18:42:40 crc kubenswrapper[4792]: I1202 18:42:40.890869 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Dec 02 18:42:40 crc kubenswrapper[4792]: I1202 18:42:40.932430 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ace09f1b-8dfe-48ae-93b6-b6a6f32beb8b-catalog-content\") pod \"community-operators-ch67n\" (UID: \"ace09f1b-8dfe-48ae-93b6-b6a6f32beb8b\") " pod="openshift-marketplace/community-operators-ch67n"
Dec 02 18:42:40 crc kubenswrapper[4792]: I1202 18:42:40.932490 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ace09f1b-8dfe-48ae-93b6-b6a6f32beb8b-utilities\") pod \"community-operators-ch67n\" (UID: \"ace09f1b-8dfe-48ae-93b6-b6a6f32beb8b\") " pod="openshift-marketplace/community-operators-ch67n"
Dec 02 18:42:40 crc kubenswrapper[4792]: I1202 18:42:40.932514 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fkfnv\" (UniqueName: \"kubernetes.io/projected/ace09f1b-8dfe-48ae-93b6-b6a6f32beb8b-kube-api-access-fkfnv\") pod \"community-operators-ch67n\" (UID: \"ace09f1b-8dfe-48ae-93b6-b6a6f32beb8b\") " pod="openshift-marketplace/community-operators-ch67n"
Dec 02 18:42:40 crc kubenswrapper[4792]: I1202 18:42:40.933087 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ace09f1b-8dfe-48ae-93b6-b6a6f32beb8b-catalog-content\") pod \"community-operators-ch67n\" (UID: \"ace09f1b-8dfe-48ae-93b6-b6a6f32beb8b\") " pod="openshift-marketplace/community-operators-ch67n"
Dec 02 18:42:40 crc kubenswrapper[4792]: I1202 18:42:40.933480 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ace09f1b-8dfe-48ae-93b6-b6a6f32beb8b-utilities\") pod \"community-operators-ch67n\" (UID: \"ace09f1b-8dfe-48ae-93b6-b6a6f32beb8b\") " pod="openshift-marketplace/community-operators-ch67n"
Dec 02 18:42:40 crc kubenswrapper[4792]: I1202 18:42:40.955052 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fkfnv\" (UniqueName: \"kubernetes.io/projected/ace09f1b-8dfe-48ae-93b6-b6a6f32beb8b-kube-api-access-fkfnv\") pod \"community-operators-ch67n\" (UID: \"ace09f1b-8dfe-48ae-93b6-b6a6f32beb8b\") " pod="openshift-marketplace/community-operators-ch67n"
Dec 02 18:42:41 crc kubenswrapper[4792]: I1202 18:42:41.033337 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ch67n"
Dec 02 18:42:41 crc kubenswrapper[4792]: I1202 18:42:41.033804 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8bgr\" (UniqueName: \"kubernetes.io/projected/72d09f2a-dc85-44d7-bd86-7248dcedd68b-kube-api-access-q8bgr\") pod \"certified-operators-p7bwt\" (UID: \"72d09f2a-dc85-44d7-bd86-7248dcedd68b\") " pod="openshift-marketplace/certified-operators-p7bwt"
Dec 02 18:42:41 crc kubenswrapper[4792]: I1202 18:42:41.034505 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72d09f2a-dc85-44d7-bd86-7248dcedd68b-utilities\") pod \"certified-operators-p7bwt\" (UID: \"72d09f2a-dc85-44d7-bd86-7248dcedd68b\") " pod="openshift-marketplace/certified-operators-p7bwt"
Dec 02 18:42:41 crc kubenswrapper[4792]: I1202 18:42:41.034788 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72d09f2a-dc85-44d7-bd86-7248dcedd68b-catalog-content\") pod \"certified-operators-p7bwt\" (UID: \"72d09f2a-dc85-44d7-bd86-7248dcedd68b\") " pod="openshift-marketplace/certified-operators-p7bwt"
Dec 02 18:42:41 crc kubenswrapper[4792]: I1202 18:42:41.137446 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72d09f2a-dc85-44d7-bd86-7248dcedd68b-catalog-content\") pod \"certified-operators-p7bwt\" (UID: \"72d09f2a-dc85-44d7-bd86-7248dcedd68b\") " pod="openshift-marketplace/certified-operators-p7bwt"
Dec 02 18:42:41 crc kubenswrapper[4792]: I1202 18:42:41.137499 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8bgr\" (UniqueName: \"kubernetes.io/projected/72d09f2a-dc85-44d7-bd86-7248dcedd68b-kube-api-access-q8bgr\") pod \"certified-operators-p7bwt\" (UID: \"72d09f2a-dc85-44d7-bd86-7248dcedd68b\") " pod="openshift-marketplace/certified-operators-p7bwt"
Dec 02 18:42:41 crc kubenswrapper[4792]: I1202 18:42:41.137628 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72d09f2a-dc85-44d7-bd86-7248dcedd68b-utilities\") pod \"certified-operators-p7bwt\" (UID: \"72d09f2a-dc85-44d7-bd86-7248dcedd68b\") " pod="openshift-marketplace/certified-operators-p7bwt"
Dec 02 18:42:41 crc kubenswrapper[4792]: I1202 18:42:41.138197 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72d09f2a-dc85-44d7-bd86-7248dcedd68b-catalog-content\") pod \"certified-operators-p7bwt\" (UID: \"72d09f2a-dc85-44d7-bd86-7248dcedd68b\") " pod="openshift-marketplace/certified-operators-p7bwt"
Dec 02 18:42:41 crc kubenswrapper[4792]: I1202 18:42:41.138246 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72d09f2a-dc85-44d7-bd86-7248dcedd68b-utilities\") pod \"certified-operators-p7bwt\" (UID: \"72d09f2a-dc85-44d7-bd86-7248dcedd68b\") " pod="openshift-marketplace/certified-operators-p7bwt"
Dec 02 18:42:41 crc kubenswrapper[4792]: I1202 18:42:41.161301 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8bgr\" (UniqueName: \"kubernetes.io/projected/72d09f2a-dc85-44d7-bd86-7248dcedd68b-kube-api-access-q8bgr\") pod \"certified-operators-p7bwt\" (UID: \"72d09f2a-dc85-44d7-bd86-7248dcedd68b\") " pod="openshift-marketplace/certified-operators-p7bwt"
Dec 02 18:42:41 crc kubenswrapper[4792]: I1202 18:42:41.213904 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-p7bwt"
Dec 02 18:42:41 crc kubenswrapper[4792]: I1202 18:42:41.497768 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ch67n"]
Dec 02 18:42:41 crc kubenswrapper[4792]: W1202 18:42:41.501942 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podace09f1b_8dfe_48ae_93b6_b6a6f32beb8b.slice/crio-1210a8bb0c29c118fe5909683b1d5ca812329fe1962bc076390137d112bf1d32 WatchSource:0}: Error finding container 1210a8bb0c29c118fe5909683b1d5ca812329fe1962bc076390137d112bf1d32: Status 404 returned error can't find the container with id 1210a8bb0c29c118fe5909683b1d5ca812329fe1962bc076390137d112bf1d32
Dec 02 18:42:41 crc kubenswrapper[4792]: I1202 18:42:41.640752 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-p7bwt"]
Dec 02 18:42:41 crc kubenswrapper[4792]: I1202 18:42:41.964362 4792 generic.go:334] "Generic (PLEG): container finished" podID="ace09f1b-8dfe-48ae-93b6-b6a6f32beb8b" containerID="e8b3ae23b63635bef3c6714ea6e9339fe4e73be692881f55f97997cd79f93894" exitCode=0
Dec 02 18:42:41 crc kubenswrapper[4792]: I1202 18:42:41.964909 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ch67n" event={"ID":"ace09f1b-8dfe-48ae-93b6-b6a6f32beb8b","Type":"ContainerDied","Data":"e8b3ae23b63635bef3c6714ea6e9339fe4e73be692881f55f97997cd79f93894"}
Dec 02 18:42:41 crc kubenswrapper[4792]: I1202 18:42:41.964947 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ch67n" event={"ID":"ace09f1b-8dfe-48ae-93b6-b6a6f32beb8b","Type":"ContainerStarted","Data":"1210a8bb0c29c118fe5909683b1d5ca812329fe1962bc076390137d112bf1d32"}
Dec 02 18:42:41 crc kubenswrapper[4792]: I1202 18:42:41.971271 4792 generic.go:334] "Generic (PLEG): container finished" podID="72d09f2a-dc85-44d7-bd86-7248dcedd68b" containerID="8407abd6c46ca5c7d824abe1e31ce4d9c9aef08e34a02a110e8e0e20e0794c68" exitCode=0
Dec 02 18:42:41 crc kubenswrapper[4792]: I1202 18:42:41.971379 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p7bwt" event={"ID":"72d09f2a-dc85-44d7-bd86-7248dcedd68b","Type":"ContainerDied","Data":"8407abd6c46ca5c7d824abe1e31ce4d9c9aef08e34a02a110e8e0e20e0794c68"}
Dec 02 18:42:41 crc kubenswrapper[4792]: I1202 18:42:41.971472 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p7bwt" event={"ID":"72d09f2a-dc85-44d7-bd86-7248dcedd68b","Type":"ContainerStarted","Data":"647c684decc1f02cf2a5bb90243e37fb11206dad342e25eedefd0fef2f9dae3a"}
Dec 02 18:42:41 crc kubenswrapper[4792]: I1202 18:42:41.974894 4792 generic.go:334] "Generic (PLEG): container finished" podID="e56bb4a5-5196-4a99-8d4d-2c8449675e62" containerID="3496a13a1779f25deb259629d2560b0d2e2770cd0093b28bedce86471ae7e991" exitCode=0
Dec 02 18:42:41 crc kubenswrapper[4792]: I1202 18:42:41.975006 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2wlh2" event={"ID":"e56bb4a5-5196-4a99-8d4d-2c8449675e62","Type":"ContainerDied","Data":"3496a13a1779f25deb259629d2560b0d2e2770cd0093b28bedce86471ae7e991"}
Dec 02 18:42:41 crc kubenswrapper[4792]: I1202 18:42:41.977150 4792 generic.go:334] "Generic (PLEG): container finished" podID="2f57faa3-e0a7-4d6a-b404-0e0d2fb73461" containerID="22e47bceb65112e58b0fc6b4caeb355b7358b170d9b466a7e54d31b890bc5943" exitCode=0
Dec 02 18:42:41 crc kubenswrapper[4792]: I1202 18:42:41.977183 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gtsnz" event={"ID":"2f57faa3-e0a7-4d6a-b404-0e0d2fb73461","Type":"ContainerDied","Data":"22e47bceb65112e58b0fc6b4caeb355b7358b170d9b466a7e54d31b890bc5943"}
Dec 02 18:42:42 crc kubenswrapper[4792]: I1202 18:42:42.984718 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2wlh2" event={"ID":"e56bb4a5-5196-4a99-8d4d-2c8449675e62","Type":"ContainerStarted","Data":"60b5528c29ac3bb08604c52ec44abc6a743cda3ee28085b0c312a4d41a258cf8"}
Dec 02 18:42:42 crc kubenswrapper[4792]: I1202 18:42:42.986555 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gtsnz" event={"ID":"2f57faa3-e0a7-4d6a-b404-0e0d2fb73461","Type":"ContainerStarted","Data":"e5fa272061529a575ae55d5785660c2d67db04fa38bb5da7d4bdef805fe894d7"}
Dec 02 18:42:42 crc kubenswrapper[4792]: I1202 18:42:42.988382 4792 generic.go:334] "Generic (PLEG): container finished" podID="ace09f1b-8dfe-48ae-93b6-b6a6f32beb8b" containerID="30ee0bd5a4f33ca6f49c9185c585b2460ee41a5bdbe8da580c34328e2d79bf9f" exitCode=0
Dec 02 18:42:42 crc kubenswrapper[4792]: I1202 18:42:42.988429 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ch67n" event={"ID":"ace09f1b-8dfe-48ae-93b6-b6a6f32beb8b","Type":"ContainerDied","Data":"30ee0bd5a4f33ca6f49c9185c585b2460ee41a5bdbe8da580c34328e2d79bf9f"}
Dec 02 18:42:42 crc kubenswrapper[4792]: I1202 18:42:42.990242 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p7bwt" event={"ID":"72d09f2a-dc85-44d7-bd86-7248dcedd68b","Type":"ContainerStarted","Data":"283ac11eebf6f7876a90ffe4eec6de75726f34ef1551cb964df75e01cb48dbc4"}
Dec 02 18:42:44 crc kubenswrapper[4792]: I1202 18:42:44.009969 4792 generic.go:334] "Generic (PLEG): container finished" podID="2f57faa3-e0a7-4d6a-b404-0e0d2fb73461" containerID="e5fa272061529a575ae55d5785660c2d67db04fa38bb5da7d4bdef805fe894d7" exitCode=0
Dec 02 18:42:44 crc kubenswrapper[4792]: I1202 18:42:44.010053 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gtsnz" event={"ID":"2f57faa3-e0a7-4d6a-b404-0e0d2fb73461","Type":"ContainerDied","Data":"e5fa272061529a575ae55d5785660c2d67db04fa38bb5da7d4bdef805fe894d7"}
Dec 02 18:42:44 crc kubenswrapper[4792]: I1202 18:42:44.014004 4792 generic.go:334] "Generic (PLEG): container finished" podID="e56bb4a5-5196-4a99-8d4d-2c8449675e62" containerID="60b5528c29ac3bb08604c52ec44abc6a743cda3ee28085b0c312a4d41a258cf8" exitCode=0
Dec 02 18:42:44 crc kubenswrapper[4792]: I1202 18:42:44.014160 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2wlh2" event={"ID":"e56bb4a5-5196-4a99-8d4d-2c8449675e62","Type":"ContainerDied","Data":"60b5528c29ac3bb08604c52ec44abc6a743cda3ee28085b0c312a4d41a258cf8"}
Dec 02 18:42:44 crc kubenswrapper[4792]: I1202 18:42:44.018956 4792 generic.go:334] "Generic (PLEG): container finished" podID="72d09f2a-dc85-44d7-bd86-7248dcedd68b" containerID="283ac11eebf6f7876a90ffe4eec6de75726f34ef1551cb964df75e01cb48dbc4" exitCode=0
Dec 02 18:42:44 crc kubenswrapper[4792]: I1202 18:42:44.019019 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p7bwt" event={"ID":"72d09f2a-dc85-44d7-bd86-7248dcedd68b","Type":"ContainerDied","Data":"283ac11eebf6f7876a90ffe4eec6de75726f34ef1551cb964df75e01cb48dbc4"}
Dec 02 18:42:48 crc kubenswrapper[4792]: I1202 18:42:48.047827 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p7bwt" event={"ID":"72d09f2a-dc85-44d7-bd86-7248dcedd68b","Type":"ContainerStarted","Data":"a175d74e33a1810d31b82809f7344439589f3bd8cb68e7a0007910b61e48a659"}
Dec 02 18:42:48 crc kubenswrapper[4792]: I1202 18:42:48.051119 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2wlh2" event={"ID":"e56bb4a5-5196-4a99-8d4d-2c8449675e62","Type":"ContainerStarted","Data":"9f49e3420963d2ad9246128f271ec258b439903c281b265a7aff75849476a0f4"}
Dec 02 18:42:48 crc kubenswrapper[4792]: I1202 18:42:48.053733 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gtsnz" event={"ID":"2f57faa3-e0a7-4d6a-b404-0e0d2fb73461","Type":"ContainerStarted","Data":"dade71d9034a93582219bea15ca7ec2f6bbd35dc1a7b452088872dd47044870b"}
Dec 02 18:42:48 crc kubenswrapper[4792]: I1202 18:42:48.056015 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ch67n" event={"ID":"ace09f1b-8dfe-48ae-93b6-b6a6f32beb8b","Type":"ContainerStarted","Data":"3fbb528897d77eaee2e71c608e5bf99f2a2e0fe130ea420f95c009be1badbbba"}
Dec 02 18:42:48 crc kubenswrapper[4792]: I1202 18:42:48.074005 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-p7bwt" podStartSLOduration=3.039359673 podStartE2EDuration="8.073983131s" podCreationTimestamp="2025-12-02 18:42:40 +0000 UTC" firstStartedPulling="2025-12-02 18:42:41.973735103 +0000 UTC m=+392.746627421" lastFinishedPulling="2025-12-02 18:42:47.008358551 +0000 UTC m=+397.781250879" observedRunningTime="2025-12-02 18:42:48.072123042 +0000 UTC m=+398.845015370" watchObservedRunningTime="2025-12-02 18:42:48.073983131 +0000 UTC m=+398.846875459"
Dec 02 18:42:48 crc kubenswrapper[4792]: I1202 18:42:48.095297 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-ch67n" podStartSLOduration=3.169343031 podStartE2EDuration="8.095277084s" podCreationTimestamp="2025-12-02 18:42:40 +0000 UTC" firstStartedPulling="2025-12-02 18:42:41.966772012 +0000 UTC m=+392.739664340" lastFinishedPulling="2025-12-02 18:42:46.892706075 +0000 UTC m=+397.665598393" observedRunningTime="2025-12-02 18:42:48.094965966 +0000 UTC m=+398.867858304" watchObservedRunningTime="2025-12-02 18:42:48.095277084 +0000 UTC m=+398.868169422"
Dec 02 18:42:48 crc kubenswrapper[4792]: I1202 18:42:48.117736 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-2wlh2" podStartSLOduration=4.928254794 podStartE2EDuration="10.117710877s" podCreationTimestamp="2025-12-02 18:42:38 +0000 UTC" firstStartedPulling="2025-12-02 18:42:41.976356371 +0000 UTC m=+392.749248699" lastFinishedPulling="2025-12-02 18:42:47.165812454 +0000 UTC m=+397.938704782" observedRunningTime="2025-12-02 18:42:48.113167379 +0000 UTC m=+398.886059727"
watchObservedRunningTime="2025-12-02 18:42:48.117710877 +0000 UTC m=+398.890603215" Dec 02 18:42:48 crc kubenswrapper[4792]: I1202 18:42:48.138041 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-gtsnz" podStartSLOduration=5.194648929 podStartE2EDuration="10.138018045s" podCreationTimestamp="2025-12-02 18:42:38 +0000 UTC" firstStartedPulling="2025-12-02 18:42:41.979095252 +0000 UTC m=+392.751987580" lastFinishedPulling="2025-12-02 18:42:46.922464338 +0000 UTC m=+397.695356696" observedRunningTime="2025-12-02 18:42:48.134282338 +0000 UTC m=+398.907174676" watchObservedRunningTime="2025-12-02 18:42:48.138018045 +0000 UTC m=+398.910910373" Dec 02 18:42:48 crc kubenswrapper[4792]: I1202 18:42:48.706990 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-gtsnz" Dec 02 18:42:48 crc kubenswrapper[4792]: I1202 18:42:48.707068 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-gtsnz" Dec 02 18:42:48 crc kubenswrapper[4792]: I1202 18:42:48.784707 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-2wlh2" Dec 02 18:42:48 crc kubenswrapper[4792]: I1202 18:42:48.784789 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-2wlh2" Dec 02 18:42:49 crc kubenswrapper[4792]: I1202 18:42:49.744802 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-gtsnz" podUID="2f57faa3-e0a7-4d6a-b404-0e0d2fb73461" containerName="registry-server" probeResult="failure" output=< Dec 02 18:42:49 crc kubenswrapper[4792]: timeout: failed to connect service ":50051" within 1s Dec 02 18:42:49 crc kubenswrapper[4792]: > Dec 02 18:42:49 crc kubenswrapper[4792]: I1202 18:42:49.822469 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-2wlh2" podUID="e56bb4a5-5196-4a99-8d4d-2c8449675e62" containerName="registry-server" probeResult="failure" output=< Dec 02 18:42:49 crc kubenswrapper[4792]: timeout: failed to connect service ":50051" within 1s Dec 02 18:42:49 crc kubenswrapper[4792]: > Dec 02 18:42:51 crc kubenswrapper[4792]: I1202 18:42:51.034516 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-ch67n" Dec 02 18:42:51 crc kubenswrapper[4792]: I1202 18:42:51.034627 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-ch67n" Dec 02 18:42:51 crc kubenswrapper[4792]: I1202 18:42:51.093019 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-ch67n" Dec 02 18:42:51 crc kubenswrapper[4792]: I1202 18:42:51.214340 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-p7bwt" Dec 02 18:42:51 crc kubenswrapper[4792]: I1202 18:42:51.215296 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-p7bwt" Dec 02 18:42:51 crc kubenswrapper[4792]: I1202 18:42:51.273091 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-p7bwt" Dec 02 18:42:52 crc kubenswrapper[4792]: I1202 18:42:52.130120 4792 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-p7bwt" Dec 02 18:42:54 crc kubenswrapper[4792]: I1202 18:42:54.755721 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" podUID="c586142b-c192-4706-9026-bcf666e8f7c6" containerName="registry" containerID="cri-o://613de27f91fff11016d963e1ea569a21e5b50e07c0e5f9778b557f93a83f8c13" gracePeriod=30 Dec 02 18:42:58 crc kubenswrapper[4792]: I1202 18:42:58.782764 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-gtsnz" Dec 02 18:42:58 crc kubenswrapper[4792]: I1202 18:42:58.835212 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-2wlh2" Dec 02 18:42:58 crc kubenswrapper[4792]: I1202 18:42:58.852808 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-gtsnz" Dec 02 18:42:58 crc kubenswrapper[4792]: I1202 18:42:58.890793 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-2wlh2" Dec 02 18:42:59 crc kubenswrapper[4792]: I1202 18:42:59.138827 4792 generic.go:334] "Generic (PLEG): container finished" podID="c586142b-c192-4706-9026-bcf666e8f7c6" containerID="613de27f91fff11016d963e1ea569a21e5b50e07c0e5f9778b557f93a83f8c13" exitCode=0 Dec 02 18:42:59 crc kubenswrapper[4792]: I1202 18:42:59.138965 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" event={"ID":"c586142b-c192-4706-9026-bcf666e8f7c6","Type":"ContainerDied","Data":"613de27f91fff11016d963e1ea569a21e5b50e07c0e5f9778b557f93a83f8c13"} Dec 02 18:42:59 crc kubenswrapper[4792]: I1202 18:42:59.139032 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" event={"ID":"c586142b-c192-4706-9026-bcf666e8f7c6","Type":"ContainerDied","Data":"5511b3eb5519048f2c7f42af62cd3094851b16f10ef5b843ab87fa185737aaec"} Dec 02 18:42:59 crc kubenswrapper[4792]: I1202 18:42:59.139052 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5511b3eb5519048f2c7f42af62cd3094851b16f10ef5b843ab87fa185737aaec" Dec 02 18:42:59 crc kubenswrapper[4792]: I1202 18:42:59.189703 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:42:59 crc kubenswrapper[4792]: I1202 18:42:59.249128 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c586142b-c192-4706-9026-bcf666e8f7c6-trusted-ca\") pod \"c586142b-c192-4706-9026-bcf666e8f7c6\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " Dec 02 18:42:59 crc kubenswrapper[4792]: I1202 18:42:59.249186 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/c586142b-c192-4706-9026-bcf666e8f7c6-installation-pull-secrets\") pod \"c586142b-c192-4706-9026-bcf666e8f7c6\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " Dec 02 18:42:59 crc kubenswrapper[4792]: I1202 18:42:59.249217 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/c586142b-c192-4706-9026-bcf666e8f7c6-registry-certificates\") pod \"c586142b-c192-4706-9026-bcf666e8f7c6\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " Dec 02 18:42:59 crc kubenswrapper[4792]: I1202 18:42:59.249261 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/c586142b-c192-4706-9026-bcf666e8f7c6-ca-trust-extracted\") pod \"c586142b-c192-4706-9026-bcf666e8f7c6\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " Dec 02 18:42:59 crc kubenswrapper[4792]: I1202 18:42:59.249320 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cpwrv\" (UniqueName: \"kubernetes.io/projected/c586142b-c192-4706-9026-bcf666e8f7c6-kube-api-access-cpwrv\") pod \"c586142b-c192-4706-9026-bcf666e8f7c6\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " Dec 02 18:42:59 crc kubenswrapper[4792]: I1202 18:42:59.249342 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c586142b-c192-4706-9026-bcf666e8f7c6-bound-sa-token\") pod \"c586142b-c192-4706-9026-bcf666e8f7c6\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " Dec 02 18:42:59 crc kubenswrapper[4792]: I1202 18:42:59.250219 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"c586142b-c192-4706-9026-bcf666e8f7c6\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " Dec 02 18:42:59 crc kubenswrapper[4792]: I1202 18:42:59.250337 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/c586142b-c192-4706-9026-bcf666e8f7c6-registry-tls\") pod \"c586142b-c192-4706-9026-bcf666e8f7c6\" (UID: \"c586142b-c192-4706-9026-bcf666e8f7c6\") " Dec 02 18:42:59 crc kubenswrapper[4792]: I1202 18:42:59.251721 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c586142b-c192-4706-9026-bcf666e8f7c6-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "c586142b-c192-4706-9026-bcf666e8f7c6" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:42:59 crc kubenswrapper[4792]: I1202 18:42:59.251739 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c586142b-c192-4706-9026-bcf666e8f7c6-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "c586142b-c192-4706-9026-bcf666e8f7c6" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:42:59 crc kubenswrapper[4792]: I1202 18:42:59.253886 4792 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c586142b-c192-4706-9026-bcf666e8f7c6-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 02 18:42:59 crc kubenswrapper[4792]: I1202 18:42:59.254073 4792 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/c586142b-c192-4706-9026-bcf666e8f7c6-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 02 18:42:59 crc kubenswrapper[4792]: I1202 18:42:59.266073 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c586142b-c192-4706-9026-bcf666e8f7c6-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "c586142b-c192-4706-9026-bcf666e8f7c6" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:42:59 crc kubenswrapper[4792]: I1202 18:42:59.266201 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c586142b-c192-4706-9026-bcf666e8f7c6-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "c586142b-c192-4706-9026-bcf666e8f7c6" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:42:59 crc kubenswrapper[4792]: I1202 18:42:59.268870 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c586142b-c192-4706-9026-bcf666e8f7c6-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "c586142b-c192-4706-9026-bcf666e8f7c6" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:42:59 crc kubenswrapper[4792]: I1202 18:42:59.270984 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c586142b-c192-4706-9026-bcf666e8f7c6-kube-api-access-cpwrv" (OuterVolumeSpecName: "kube-api-access-cpwrv") pod "c586142b-c192-4706-9026-bcf666e8f7c6" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6"). InnerVolumeSpecName "kube-api-access-cpwrv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:42:59 crc kubenswrapper[4792]: I1202 18:42:59.271351 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c586142b-c192-4706-9026-bcf666e8f7c6-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "c586142b-c192-4706-9026-bcf666e8f7c6" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:42:59 crc kubenswrapper[4792]: I1202 18:42:59.272067 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "c586142b-c192-4706-9026-bcf666e8f7c6" (UID: "c586142b-c192-4706-9026-bcf666e8f7c6"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 02 18:42:59 crc kubenswrapper[4792]: I1202 18:42:59.355993 4792 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/c586142b-c192-4706-9026-bcf666e8f7c6-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 02 18:42:59 crc kubenswrapper[4792]: I1202 18:42:59.356035 4792 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/c586142b-c192-4706-9026-bcf666e8f7c6-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 02 18:42:59 crc kubenswrapper[4792]: I1202 18:42:59.356046 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cpwrv\" (UniqueName: \"kubernetes.io/projected/c586142b-c192-4706-9026-bcf666e8f7c6-kube-api-access-cpwrv\") on node \"crc\" DevicePath \"\"" Dec 02 18:42:59 crc kubenswrapper[4792]: I1202 18:42:59.356058 4792 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c586142b-c192-4706-9026-bcf666e8f7c6-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 02 18:42:59 crc kubenswrapper[4792]: I1202 18:42:59.356067 4792 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/c586142b-c192-4706-9026-bcf666e8f7c6-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 02 18:43:00 crc kubenswrapper[4792]: I1202 18:43:00.147066 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-nr8ml" Dec 02 18:43:00 crc kubenswrapper[4792]: I1202 18:43:00.171701 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-nr8ml"] Dec 02 18:43:00 crc kubenswrapper[4792]: I1202 18:43:00.175289 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-nr8ml"] Dec 02 18:43:01 crc kubenswrapper[4792]: I1202 18:43:01.078777 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-ch67n" Dec 02 18:43:01 crc kubenswrapper[4792]: I1202 18:43:01.556764 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c586142b-c192-4706-9026-bcf666e8f7c6" path="/var/lib/kubelet/pods/c586142b-c192-4706-9026-bcf666e8f7c6/volumes" Dec 02 18:43:08 crc kubenswrapper[4792]: I1202 18:43:08.082024 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 18:43:08 crc kubenswrapper[4792]: I1202 18:43:08.082386 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 18:43:08 crc kubenswrapper[4792]: I1202 18:43:08.082452 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" Dec 02 18:43:08 crc kubenswrapper[4792]: I1202 18:43:08.083331 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ac55c565ccef01baeaf0c74e3ed8322811e310a89ab86a4e7ab2e0c46a0fe098"} pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 18:43:08 crc kubenswrapper[4792]: I1202 18:43:08.083406 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" containerID="cri-o://ac55c565ccef01baeaf0c74e3ed8322811e310a89ab86a4e7ab2e0c46a0fe098" gracePeriod=600 Dec 02 18:43:09 crc kubenswrapper[4792]: I1202 18:43:09.221409 4792 generic.go:334] "Generic (PLEG): container finished" podID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerID="ac55c565ccef01baeaf0c74e3ed8322811e310a89ab86a4e7ab2e0c46a0fe098" exitCode=0 Dec 02 18:43:09 crc kubenswrapper[4792]: I1202 18:43:09.221513 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" event={"ID":"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f","Type":"ContainerDied","Data":"ac55c565ccef01baeaf0c74e3ed8322811e310a89ab86a4e7ab2e0c46a0fe098"} Dec 02 18:43:09 crc kubenswrapper[4792]: I1202 18:43:09.222668 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" 
event={"ID":"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f","Type":"ContainerStarted","Data":"b375625f1c2078e924950cfc9619c063ebe0f56fdae68ff761f6603f5e189ffa"} Dec 02 18:43:09 crc kubenswrapper[4792]: I1202 18:43:09.222748 4792 scope.go:117] "RemoveContainer" containerID="a3e5d756c71d607fb2bddc50d41146634abc208a86cb5748910948615433457c" Dec 02 18:45:00 crc kubenswrapper[4792]: I1202 18:45:00.189954 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411685-nptp5"] Dec 02 18:45:00 crc kubenswrapper[4792]: E1202 18:45:00.192073 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c586142b-c192-4706-9026-bcf666e8f7c6" containerName="registry" Dec 02 18:45:00 crc kubenswrapper[4792]: I1202 18:45:00.192144 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="c586142b-c192-4706-9026-bcf666e8f7c6" containerName="registry" Dec 02 18:45:00 crc kubenswrapper[4792]: I1202 18:45:00.192600 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="c586142b-c192-4706-9026-bcf666e8f7c6" containerName="registry" Dec 02 18:45:00 crc kubenswrapper[4792]: I1202 18:45:00.193384 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411685-nptp5" Dec 02 18:45:00 crc kubenswrapper[4792]: I1202 18:45:00.195614 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 02 18:45:00 crc kubenswrapper[4792]: I1202 18:45:00.196134 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 02 18:45:00 crc kubenswrapper[4792]: I1202 18:45:00.202876 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411685-nptp5"] Dec 02 18:45:00 crc kubenswrapper[4792]: I1202 18:45:00.262413 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2a1557aa-b284-4da6-85e5-40405c02f4e9-secret-volume\") pod \"collect-profiles-29411685-nptp5\" (UID: \"2a1557aa-b284-4da6-85e5-40405c02f4e9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411685-nptp5" Dec 02 18:45:00 crc kubenswrapper[4792]: I1202 18:45:00.262471 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zbxsw\" (UniqueName: \"kubernetes.io/projected/2a1557aa-b284-4da6-85e5-40405c02f4e9-kube-api-access-zbxsw\") pod \"collect-profiles-29411685-nptp5\" (UID: \"2a1557aa-b284-4da6-85e5-40405c02f4e9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411685-nptp5" Dec 02 18:45:00 crc kubenswrapper[4792]: I1202 18:45:00.262611 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2a1557aa-b284-4da6-85e5-40405c02f4e9-config-volume\") pod \"collect-profiles-29411685-nptp5\" (UID: \"2a1557aa-b284-4da6-85e5-40405c02f4e9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411685-nptp5" Dec 02 18:45:00 crc kubenswrapper[4792]: I1202 18:45:00.363951 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2a1557aa-b284-4da6-85e5-40405c02f4e9-config-volume\") pod \"collect-profiles-29411685-nptp5\" (UID: 
\"2a1557aa-b284-4da6-85e5-40405c02f4e9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411685-nptp5" Dec 02 18:45:00 crc kubenswrapper[4792]: I1202 18:45:00.364074 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2a1557aa-b284-4da6-85e5-40405c02f4e9-secret-volume\") pod \"collect-profiles-29411685-nptp5\" (UID: \"2a1557aa-b284-4da6-85e5-40405c02f4e9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411685-nptp5" Dec 02 18:45:00 crc kubenswrapper[4792]: I1202 18:45:00.364148 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zbxsw\" (UniqueName: \"kubernetes.io/projected/2a1557aa-b284-4da6-85e5-40405c02f4e9-kube-api-access-zbxsw\") pod \"collect-profiles-29411685-nptp5\" (UID: \"2a1557aa-b284-4da6-85e5-40405c02f4e9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411685-nptp5" Dec 02 18:45:00 crc kubenswrapper[4792]: I1202 18:45:00.365847 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2a1557aa-b284-4da6-85e5-40405c02f4e9-config-volume\") pod \"collect-profiles-29411685-nptp5\" (UID: \"2a1557aa-b284-4da6-85e5-40405c02f4e9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411685-nptp5" Dec 02 18:45:00 crc kubenswrapper[4792]: I1202 18:45:00.377778 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2a1557aa-b284-4da6-85e5-40405c02f4e9-secret-volume\") pod \"collect-profiles-29411685-nptp5\" (UID: \"2a1557aa-b284-4da6-85e5-40405c02f4e9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411685-nptp5" Dec 02 18:45:00 crc kubenswrapper[4792]: I1202 18:45:00.395287 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zbxsw\" (UniqueName: \"kubernetes.io/projected/2a1557aa-b284-4da6-85e5-40405c02f4e9-kube-api-access-zbxsw\") pod \"collect-profiles-29411685-nptp5\" (UID: \"2a1557aa-b284-4da6-85e5-40405c02f4e9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411685-nptp5" Dec 02 18:45:00 crc kubenswrapper[4792]: I1202 18:45:00.513842 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411685-nptp5" Dec 02 18:45:00 crc kubenswrapper[4792]: I1202 18:45:00.780800 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411685-nptp5"] Dec 02 18:45:01 crc kubenswrapper[4792]: I1202 18:45:01.109291 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411685-nptp5" event={"ID":"2a1557aa-b284-4da6-85e5-40405c02f4e9","Type":"ContainerStarted","Data":"d5d2394ddd22fd280c338819ca886d64642feeaca364a55071145f8d61126307"} Dec 02 18:45:02 crc kubenswrapper[4792]: I1202 18:45:02.122138 4792 generic.go:334] "Generic (PLEG): container finished" podID="2a1557aa-b284-4da6-85e5-40405c02f4e9" containerID="1f46444b1b93ad5fd5c3b2fc1e4ded3d3e973568f49cf1d9d555c8179d3f1168" exitCode=0 Dec 02 18:45:02 crc kubenswrapper[4792]: I1202 18:45:02.122269 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411685-nptp5" event={"ID":"2a1557aa-b284-4da6-85e5-40405c02f4e9","Type":"ContainerDied","Data":"1f46444b1b93ad5fd5c3b2fc1e4ded3d3e973568f49cf1d9d555c8179d3f1168"} Dec 02 18:45:03 crc kubenswrapper[4792]: I1202 18:45:03.450648 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411685-nptp5" Dec 02 18:45:03 crc kubenswrapper[4792]: I1202 18:45:03.512496 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2a1557aa-b284-4da6-85e5-40405c02f4e9-secret-volume\") pod \"2a1557aa-b284-4da6-85e5-40405c02f4e9\" (UID: \"2a1557aa-b284-4da6-85e5-40405c02f4e9\") " Dec 02 18:45:03 crc kubenswrapper[4792]: I1202 18:45:03.525145 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a1557aa-b284-4da6-85e5-40405c02f4e9-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "2a1557aa-b284-4da6-85e5-40405c02f4e9" (UID: "2a1557aa-b284-4da6-85e5-40405c02f4e9"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:45:03 crc kubenswrapper[4792]: I1202 18:45:03.613864 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2a1557aa-b284-4da6-85e5-40405c02f4e9-config-volume\") pod \"2a1557aa-b284-4da6-85e5-40405c02f4e9\" (UID: \"2a1557aa-b284-4da6-85e5-40405c02f4e9\") " Dec 02 18:45:03 crc kubenswrapper[4792]: I1202 18:45:03.614042 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zbxsw\" (UniqueName: \"kubernetes.io/projected/2a1557aa-b284-4da6-85e5-40405c02f4e9-kube-api-access-zbxsw\") pod \"2a1557aa-b284-4da6-85e5-40405c02f4e9\" (UID: \"2a1557aa-b284-4da6-85e5-40405c02f4e9\") " Dec 02 18:45:03 crc kubenswrapper[4792]: I1202 18:45:03.615055 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2a1557aa-b284-4da6-85e5-40405c02f4e9-config-volume" (OuterVolumeSpecName: "config-volume") pod "2a1557aa-b284-4da6-85e5-40405c02f4e9" (UID: "2a1557aa-b284-4da6-85e5-40405c02f4e9"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:45:03 crc kubenswrapper[4792]: I1202 18:45:03.615578 4792 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2a1557aa-b284-4da6-85e5-40405c02f4e9-config-volume\") on node \"crc\" DevicePath \"\"" Dec 02 18:45:03 crc kubenswrapper[4792]: I1202 18:45:03.615635 4792 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2a1557aa-b284-4da6-85e5-40405c02f4e9-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 02 18:45:03 crc kubenswrapper[4792]: I1202 18:45:03.621075 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a1557aa-b284-4da6-85e5-40405c02f4e9-kube-api-access-zbxsw" (OuterVolumeSpecName: "kube-api-access-zbxsw") pod "2a1557aa-b284-4da6-85e5-40405c02f4e9" (UID: "2a1557aa-b284-4da6-85e5-40405c02f4e9"). InnerVolumeSpecName "kube-api-access-zbxsw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:45:03 crc kubenswrapper[4792]: I1202 18:45:03.716375 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zbxsw\" (UniqueName: \"kubernetes.io/projected/2a1557aa-b284-4da6-85e5-40405c02f4e9-kube-api-access-zbxsw\") on node \"crc\" DevicePath \"\"" Dec 02 18:45:04 crc kubenswrapper[4792]: I1202 18:45:04.141732 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411685-nptp5" event={"ID":"2a1557aa-b284-4da6-85e5-40405c02f4e9","Type":"ContainerDied","Data":"d5d2394ddd22fd280c338819ca886d64642feeaca364a55071145f8d61126307"} Dec 02 18:45:04 crc kubenswrapper[4792]: I1202 18:45:04.142441 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d5d2394ddd22fd280c338819ca886d64642feeaca364a55071145f8d61126307" Dec 02 18:45:04 crc kubenswrapper[4792]: I1202 18:45:04.141817 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411685-nptp5" Dec 02 18:45:08 crc kubenswrapper[4792]: I1202 18:45:08.081184 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 18:45:08 crc kubenswrapper[4792]: I1202 18:45:08.081326 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 18:45:09 crc kubenswrapper[4792]: I1202 18:45:09.805438 4792 scope.go:117] "RemoveContainer" containerID="056613063c45fde111acb78cdcca0d1a8819bc3c5d4b121856a75320de201301" Dec 02 18:45:09 crc kubenswrapper[4792]: I1202 18:45:09.839285 4792 scope.go:117] "RemoveContainer" containerID="613de27f91fff11016d963e1ea569a21e5b50e07c0e5f9778b557f93a83f8c13" Dec 02 18:45:38 crc kubenswrapper[4792]: I1202 18:45:38.081850 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 18:45:38 crc kubenswrapper[4792]: I1202 18:45:38.083771 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 18:46:08 crc kubenswrapper[4792]: I1202 18:46:08.081466 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 18:46:08 crc kubenswrapper[4792]: I1202 18:46:08.082639 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 18:46:08 crc kubenswrapper[4792]: I1202 18:46:08.082748 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" Dec 02 18:46:08 crc kubenswrapper[4792]: I1202 18:46:08.084196 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b375625f1c2078e924950cfc9619c063ebe0f56fdae68ff761f6603f5e189ffa"} pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 18:46:08 crc kubenswrapper[4792]: I1202 18:46:08.084359 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" 
podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" containerID="cri-o://b375625f1c2078e924950cfc9619c063ebe0f56fdae68ff761f6603f5e189ffa" gracePeriod=600 Dec 02 18:46:08 crc kubenswrapper[4792]: I1202 18:46:08.615606 4792 generic.go:334] "Generic (PLEG): container finished" podID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerID="b375625f1c2078e924950cfc9619c063ebe0f56fdae68ff761f6603f5e189ffa" exitCode=0 Dec 02 18:46:08 crc kubenswrapper[4792]: I1202 18:46:08.615714 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" event={"ID":"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f","Type":"ContainerDied","Data":"b375625f1c2078e924950cfc9619c063ebe0f56fdae68ff761f6603f5e189ffa"} Dec 02 18:46:08 crc kubenswrapper[4792]: I1202 18:46:08.616283 4792 scope.go:117] "RemoveContainer" containerID="ac55c565ccef01baeaf0c74e3ed8322811e310a89ab86a4e7ab2e0c46a0fe098" Dec 02 18:46:09 crc kubenswrapper[4792]: I1202 18:46:09.625155 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" event={"ID":"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f","Type":"ContainerStarted","Data":"2d504f78e5a425b84ad94074fbe4a535e7551511fbcfda776b0f6a3eefb2619a"} Dec 02 18:47:59 crc kubenswrapper[4792]: I1202 18:47:59.756852 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92108kgnb"] Dec 02 18:47:59 crc kubenswrapper[4792]: E1202 18:47:59.758030 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a1557aa-b284-4da6-85e5-40405c02f4e9" containerName="collect-profiles" Dec 02 18:47:59 crc kubenswrapper[4792]: I1202 18:47:59.758047 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a1557aa-b284-4da6-85e5-40405c02f4e9" containerName="collect-profiles" Dec 02 18:47:59 crc kubenswrapper[4792]: I1202 18:47:59.758159 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a1557aa-b284-4da6-85e5-40405c02f4e9" containerName="collect-profiles" Dec 02 18:47:59 crc kubenswrapper[4792]: I1202 18:47:59.759108 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92108kgnb" Dec 02 18:47:59 crc kubenswrapper[4792]: I1202 18:47:59.761043 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 02 18:47:59 crc kubenswrapper[4792]: I1202 18:47:59.771078 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92108kgnb"] Dec 02 18:47:59 crc kubenswrapper[4792]: I1202 18:47:59.832094 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lrtbv\" (UniqueName: \"kubernetes.io/projected/d557837e-335a-4da0-b7aa-e8d0a516eada-kube-api-access-lrtbv\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92108kgnb\" (UID: \"d557837e-335a-4da0-b7aa-e8d0a516eada\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92108kgnb" Dec 02 18:47:59 crc kubenswrapper[4792]: I1202 18:47:59.832175 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d557837e-335a-4da0-b7aa-e8d0a516eada-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92108kgnb\" (UID: \"d557837e-335a-4da0-b7aa-e8d0a516eada\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92108kgnb" Dec 02 18:47:59 crc kubenswrapper[4792]: I1202 18:47:59.832312 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d557837e-335a-4da0-b7aa-e8d0a516eada-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92108kgnb\" (UID: \"d557837e-335a-4da0-b7aa-e8d0a516eada\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92108kgnb" Dec 02 18:47:59 crc kubenswrapper[4792]: I1202 18:47:59.933609 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lrtbv\" (UniqueName: \"kubernetes.io/projected/d557837e-335a-4da0-b7aa-e8d0a516eada-kube-api-access-lrtbv\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92108kgnb\" (UID: \"d557837e-335a-4da0-b7aa-e8d0a516eada\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92108kgnb" Dec 02 18:47:59 crc kubenswrapper[4792]: I1202 18:47:59.934137 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d557837e-335a-4da0-b7aa-e8d0a516eada-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92108kgnb\" (UID: \"d557837e-335a-4da0-b7aa-e8d0a516eada\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92108kgnb" Dec 02 18:47:59 crc kubenswrapper[4792]: I1202 18:47:59.934255 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d557837e-335a-4da0-b7aa-e8d0a516eada-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92108kgnb\" (UID: \"d557837e-335a-4da0-b7aa-e8d0a516eada\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92108kgnb" Dec 02 18:47:59 crc kubenswrapper[4792]: I1202 18:47:59.934773 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/d557837e-335a-4da0-b7aa-e8d0a516eada-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92108kgnb\" (UID: \"d557837e-335a-4da0-b7aa-e8d0a516eada\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92108kgnb" Dec 02 18:47:59 crc kubenswrapper[4792]: I1202 18:47:59.934854 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d557837e-335a-4da0-b7aa-e8d0a516eada-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92108kgnb\" (UID: \"d557837e-335a-4da0-b7aa-e8d0a516eada\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92108kgnb" Dec 02 18:47:59 crc kubenswrapper[4792]: I1202 18:47:59.959202 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lrtbv\" (UniqueName: \"kubernetes.io/projected/d557837e-335a-4da0-b7aa-e8d0a516eada-kube-api-access-lrtbv\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92108kgnb\" (UID: \"d557837e-335a-4da0-b7aa-e8d0a516eada\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92108kgnb" Dec 02 18:48:00 crc kubenswrapper[4792]: I1202 18:48:00.080760 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92108kgnb" Dec 02 18:48:00 crc kubenswrapper[4792]: I1202 18:48:00.328013 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92108kgnb"] Dec 02 18:48:00 crc kubenswrapper[4792]: I1202 18:48:00.610632 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92108kgnb" event={"ID":"d557837e-335a-4da0-b7aa-e8d0a516eada","Type":"ContainerStarted","Data":"0d844cdd76147e60d37724afa5c6066deedcc496739e18966de8320c97ef2b5f"} Dec 02 18:48:01 crc kubenswrapper[4792]: I1202 18:48:01.619924 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92108kgnb" event={"ID":"d557837e-335a-4da0-b7aa-e8d0a516eada","Type":"ContainerStarted","Data":"e4f1feb7ff0de571d9163263d107fc042b6cde9ad3ebd7286de1a96d88df5ff3"} Dec 02 18:48:02 crc kubenswrapper[4792]: I1202 18:48:02.631696 4792 generic.go:334] "Generic (PLEG): container finished" podID="d557837e-335a-4da0-b7aa-e8d0a516eada" containerID="e4f1feb7ff0de571d9163263d107fc042b6cde9ad3ebd7286de1a96d88df5ff3" exitCode=0 Dec 02 18:48:02 crc kubenswrapper[4792]: I1202 18:48:02.631771 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92108kgnb" event={"ID":"d557837e-335a-4da0-b7aa-e8d0a516eada","Type":"ContainerDied","Data":"e4f1feb7ff0de571d9163263d107fc042b6cde9ad3ebd7286de1a96d88df5ff3"} Dec 02 18:48:02 crc kubenswrapper[4792]: I1202 18:48:02.635295 4792 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 18:48:04 crc kubenswrapper[4792]: I1202 18:48:04.649340 4792 generic.go:334] "Generic (PLEG): container finished" podID="d557837e-335a-4da0-b7aa-e8d0a516eada" containerID="f61a09c644f77351be0dbf4f0492e0e459210559395fcaddf47eb47eced739ef" exitCode=0 Dec 02 18:48:04 crc kubenswrapper[4792]: I1202 18:48:04.649437 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92108kgnb" event={"ID":"d557837e-335a-4da0-b7aa-e8d0a516eada","Type":"ContainerDied","Data":"f61a09c644f77351be0dbf4f0492e0e459210559395fcaddf47eb47eced739ef"} Dec 02 18:48:05 crc kubenswrapper[4792]: I1202 18:48:05.660371 4792 generic.go:334] "Generic (PLEG): container finished" podID="d557837e-335a-4da0-b7aa-e8d0a516eada" containerID="15a3999400d0d4a1de22ef5cf64a5376133279070400b2db57dd33954e028665" exitCode=0 Dec 02 18:48:05 crc kubenswrapper[4792]: I1202 18:48:05.660457 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92108kgnb" event={"ID":"d557837e-335a-4da0-b7aa-e8d0a516eada","Type":"ContainerDied","Data":"15a3999400d0d4a1de22ef5cf64a5376133279070400b2db57dd33954e028665"} Dec 02 18:48:06 crc kubenswrapper[4792]: I1202 18:48:06.916060 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92108kgnb" Dec 02 18:48:07 crc kubenswrapper[4792]: I1202 18:48:07.013451 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d557837e-335a-4da0-b7aa-e8d0a516eada-bundle\") pod \"d557837e-335a-4da0-b7aa-e8d0a516eada\" (UID: \"d557837e-335a-4da0-b7aa-e8d0a516eada\") " Dec 02 18:48:07 crc kubenswrapper[4792]: I1202 18:48:07.013739 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lrtbv\" (UniqueName: \"kubernetes.io/projected/d557837e-335a-4da0-b7aa-e8d0a516eada-kube-api-access-lrtbv\") pod \"d557837e-335a-4da0-b7aa-e8d0a516eada\" (UID: \"d557837e-335a-4da0-b7aa-e8d0a516eada\") " Dec 02 18:48:07 crc kubenswrapper[4792]: I1202 18:48:07.013813 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d557837e-335a-4da0-b7aa-e8d0a516eada-util\") pod \"d557837e-335a-4da0-b7aa-e8d0a516eada\" (UID: \"d557837e-335a-4da0-b7aa-e8d0a516eada\") " Dec 02 18:48:07 crc kubenswrapper[4792]: I1202 18:48:07.016644 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d557837e-335a-4da0-b7aa-e8d0a516eada-bundle" (OuterVolumeSpecName: "bundle") pod "d557837e-335a-4da0-b7aa-e8d0a516eada" (UID: "d557837e-335a-4da0-b7aa-e8d0a516eada"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:48:07 crc kubenswrapper[4792]: I1202 18:48:07.023680 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d557837e-335a-4da0-b7aa-e8d0a516eada-kube-api-access-lrtbv" (OuterVolumeSpecName: "kube-api-access-lrtbv") pod "d557837e-335a-4da0-b7aa-e8d0a516eada" (UID: "d557837e-335a-4da0-b7aa-e8d0a516eada"). InnerVolumeSpecName "kube-api-access-lrtbv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:48:07 crc kubenswrapper[4792]: I1202 18:48:07.025448 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d557837e-335a-4da0-b7aa-e8d0a516eada-util" (OuterVolumeSpecName: "util") pod "d557837e-335a-4da0-b7aa-e8d0a516eada" (UID: "d557837e-335a-4da0-b7aa-e8d0a516eada"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:48:07 crc kubenswrapper[4792]: I1202 18:48:07.115449 4792 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d557837e-335a-4da0-b7aa-e8d0a516eada-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 18:48:07 crc kubenswrapper[4792]: I1202 18:48:07.115495 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lrtbv\" (UniqueName: \"kubernetes.io/projected/d557837e-335a-4da0-b7aa-e8d0a516eada-kube-api-access-lrtbv\") on node \"crc\" DevicePath \"\"" Dec 02 18:48:07 crc kubenswrapper[4792]: I1202 18:48:07.115507 4792 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d557837e-335a-4da0-b7aa-e8d0a516eada-util\") on node \"crc\" DevicePath \"\"" Dec 02 18:48:07 crc kubenswrapper[4792]: I1202 18:48:07.677031 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92108kgnb" event={"ID":"d557837e-335a-4da0-b7aa-e8d0a516eada","Type":"ContainerDied","Data":"0d844cdd76147e60d37724afa5c6066deedcc496739e18966de8320c97ef2b5f"} Dec 02 18:48:07 crc kubenswrapper[4792]: I1202 18:48:07.677557 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0d844cdd76147e60d37724afa5c6066deedcc496739e18966de8320c97ef2b5f" Dec 02 18:48:07 crc kubenswrapper[4792]: I1202 18:48:07.677204 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92108kgnb" Dec 02 18:48:10 crc kubenswrapper[4792]: I1202 18:48:10.890723 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-4jhb5"] Dec 02 18:48:10 crc kubenswrapper[4792]: I1202 18:48:10.892107 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="ovn-controller" containerID="cri-o://38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab" gracePeriod=30 Dec 02 18:48:10 crc kubenswrapper[4792]: I1202 18:48:10.892779 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="sbdb" containerID="cri-o://8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d" gracePeriod=30 Dec 02 18:48:10 crc kubenswrapper[4792]: I1202 18:48:10.892853 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="nbdb" containerID="cri-o://04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd" gracePeriod=30 Dec 02 18:48:10 crc kubenswrapper[4792]: I1202 18:48:10.892912 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="northd" containerID="cri-o://8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b" gracePeriod=30 Dec 02 18:48:10 crc kubenswrapper[4792]: I1202 18:48:10.892966 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="kube-rbac-proxy-ovn-metrics" 
containerID="cri-o://4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b" gracePeriod=30 Dec 02 18:48:10 crc kubenswrapper[4792]: I1202 18:48:10.893039 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="kube-rbac-proxy-node" containerID="cri-o://b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159" gracePeriod=30 Dec 02 18:48:10 crc kubenswrapper[4792]: I1202 18:48:10.893094 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="ovn-acl-logging" containerID="cri-o://dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c" gracePeriod=30 Dec 02 18:48:10 crc kubenswrapper[4792]: I1202 18:48:10.958890 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="ovnkube-controller" containerID="cri-o://1f4b2f205261a5f63018ceba56a93274528de16d5903727bc5023cceae7a5132" gracePeriod=30 Dec 02 18:48:11 crc kubenswrapper[4792]: I1202 18:48:11.709841 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4jhb5_2f79c130-fb71-4e1c-9e2d-ef492a0acb04/ovnkube-controller/3.log" Dec 02 18:48:11 crc kubenswrapper[4792]: I1202 18:48:11.712908 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4jhb5_2f79c130-fb71-4e1c-9e2d-ef492a0acb04/ovn-acl-logging/0.log" Dec 02 18:48:11 crc kubenswrapper[4792]: I1202 18:48:11.713971 4792 generic.go:334] "Generic (PLEG): container finished" podID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerID="4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b" exitCode=0 Dec 02 18:48:11 crc kubenswrapper[4792]: I1202 18:48:11.714015 4792 generic.go:334] "Generic (PLEG): container finished" podID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerID="dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c" exitCode=143 Dec 02 18:48:11 crc kubenswrapper[4792]: I1202 18:48:11.714050 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" event={"ID":"2f79c130-fb71-4e1c-9e2d-ef492a0acb04","Type":"ContainerDied","Data":"4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b"} Dec 02 18:48:11 crc kubenswrapper[4792]: I1202 18:48:11.714099 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" event={"ID":"2f79c130-fb71-4e1c-9e2d-ef492a0acb04","Type":"ContainerDied","Data":"dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c"} Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.399058 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4jhb5_2f79c130-fb71-4e1c-9e2d-ef492a0acb04/ovnkube-controller/3.log" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.401513 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4jhb5_2f79c130-fb71-4e1c-9e2d-ef492a0acb04/ovn-acl-logging/0.log" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.402003 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4jhb5_2f79c130-fb71-4e1c-9e2d-ef492a0acb04/ovn-controller/0.log" Dec 02 18:48:12 crc 
kubenswrapper[4792]: I1202 18:48:12.402545 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.470249 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-7rjvb"] Dec 02 18:48:12 crc kubenswrapper[4792]: E1202 18:48:12.470465 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="ovnkube-controller" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.470478 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="ovnkube-controller" Dec 02 18:48:12 crc kubenswrapper[4792]: E1202 18:48:12.470486 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="ovnkube-controller" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.470492 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="ovnkube-controller" Dec 02 18:48:12 crc kubenswrapper[4792]: E1202 18:48:12.470500 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="ovn-acl-logging" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.470506 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="ovn-acl-logging" Dec 02 18:48:12 crc kubenswrapper[4792]: E1202 18:48:12.470531 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d557837e-335a-4da0-b7aa-e8d0a516eada" containerName="extract" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.470536 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="d557837e-335a-4da0-b7aa-e8d0a516eada" containerName="extract" Dec 02 18:48:12 crc kubenswrapper[4792]: E1202 18:48:12.470543 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="kubecfg-setup" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.470548 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="kubecfg-setup" Dec 02 18:48:12 crc kubenswrapper[4792]: E1202 18:48:12.470554 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="kube-rbac-proxy-node" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.470561 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="kube-rbac-proxy-node" Dec 02 18:48:12 crc kubenswrapper[4792]: E1202 18:48:12.470570 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="nbdb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.470575 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="nbdb" Dec 02 18:48:12 crc kubenswrapper[4792]: E1202 18:48:12.470582 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="ovnkube-controller" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.470587 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="ovnkube-controller" Dec 02 18:48:12 crc kubenswrapper[4792]: E1202 
18:48:12.470596 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="kube-rbac-proxy-ovn-metrics" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.470601 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="kube-rbac-proxy-ovn-metrics" Dec 02 18:48:12 crc kubenswrapper[4792]: E1202 18:48:12.470608 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="sbdb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.470616 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="sbdb" Dec 02 18:48:12 crc kubenswrapper[4792]: E1202 18:48:12.470624 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d557837e-335a-4da0-b7aa-e8d0a516eada" containerName="pull" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.470630 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="d557837e-335a-4da0-b7aa-e8d0a516eada" containerName="pull" Dec 02 18:48:12 crc kubenswrapper[4792]: E1202 18:48:12.470642 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="northd" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.470648 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="northd" Dec 02 18:48:12 crc kubenswrapper[4792]: E1202 18:48:12.470656 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="ovn-controller" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.470663 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="ovn-controller" Dec 02 18:48:12 crc kubenswrapper[4792]: E1202 18:48:12.470671 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d557837e-335a-4da0-b7aa-e8d0a516eada" containerName="util" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.470677 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="d557837e-335a-4da0-b7aa-e8d0a516eada" containerName="util" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.470758 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="ovnkube-controller" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.470768 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="ovn-acl-logging" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.470775 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="ovnkube-controller" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.470784 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="d557837e-335a-4da0-b7aa-e8d0a516eada" containerName="extract" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.470791 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="ovnkube-controller" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.470799 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="northd" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.470807 4792 
memory_manager.go:354] "RemoveStaleState removing state" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="sbdb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.470817 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="ovn-controller" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.470824 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="kube-rbac-proxy-node" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.470831 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="kube-rbac-proxy-ovn-metrics" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.470839 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="nbdb" Dec 02 18:48:12 crc kubenswrapper[4792]: E1202 18:48:12.470917 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="ovnkube-controller" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.470924 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="ovnkube-controller" Dec 02 18:48:12 crc kubenswrapper[4792]: E1202 18:48:12.470932 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="ovnkube-controller" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.470938 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="ovnkube-controller" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.471015 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="ovnkube-controller" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.471025 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerName="ovnkube-controller" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.472445 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.594852 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-log-socket\") pod \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.594917 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-ovn-node-metrics-cert\") pod \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.594942 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-run-openvswitch\") pod \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.594973 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2vd5d\" (UniqueName: \"kubernetes.io/projected/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-kube-api-access-2vd5d\") pod \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.594992 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-host-kubelet\") pod \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.595014 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-node-log\") pod \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.595042 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-etc-openvswitch\") pod \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.595035 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-log-socket" (OuterVolumeSpecName: "log-socket") pod "2f79c130-fb71-4e1c-9e2d-ef492a0acb04" (UID: "2f79c130-fb71-4e1c-9e2d-ef492a0acb04"). InnerVolumeSpecName "log-socket". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.595063 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-run-ovn\") pod \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.595131 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "2f79c130-fb71-4e1c-9e2d-ef492a0acb04" (UID: "2f79c130-fb71-4e1c-9e2d-ef492a0acb04"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.595119 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "2f79c130-fb71-4e1c-9e2d-ef492a0acb04" (UID: "2f79c130-fb71-4e1c-9e2d-ef492a0acb04"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.595152 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-node-log" (OuterVolumeSpecName: "node-log") pod "2f79c130-fb71-4e1c-9e2d-ef492a0acb04" (UID: "2f79c130-fb71-4e1c-9e2d-ef492a0acb04"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.595150 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "2f79c130-fb71-4e1c-9e2d-ef492a0acb04" (UID: "2f79c130-fb71-4e1c-9e2d-ef492a0acb04"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.595171 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "2f79c130-fb71-4e1c-9e2d-ef492a0acb04" (UID: "2f79c130-fb71-4e1c-9e2d-ef492a0acb04"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.595181 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "2f79c130-fb71-4e1c-9e2d-ef492a0acb04" (UID: "2f79c130-fb71-4e1c-9e2d-ef492a0acb04"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.595159 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-host-var-lib-cni-networks-ovn-kubernetes\") pod \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.595315 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-host-slash\") pod \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.595342 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-var-lib-openvswitch\") pod \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.595370 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-host-cni-bin\") pod \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.595407 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-host-run-netns\") pod \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.595442 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-ovnkube-config\") pod \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.595473 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-run-systemd\") pod \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.595494 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-ovnkube-script-lib\") pod \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.595509 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-systemd-units\") pod \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.595540 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-host-cni-netd\") pod 
\"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.595561 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-env-overrides\") pod \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.595579 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-host-run-ovn-kubernetes\") pod \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\" (UID: \"2f79c130-fb71-4e1c-9e2d-ef492a0acb04\") " Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.595769 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2cfb6b15-6de9-4109-a346-f3140614b56f-run-openvswitch\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.595809 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2cfb6b15-6de9-4109-a346-f3140614b56f-etc-openvswitch\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.595850 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2cfb6b15-6de9-4109-a346-f3140614b56f-host-run-ovn-kubernetes\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.595891 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/2cfb6b15-6de9-4109-a346-f3140614b56f-host-cni-bin\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.595950 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/2cfb6b15-6de9-4109-a346-f3140614b56f-node-log\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.595974 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/2cfb6b15-6de9-4109-a346-f3140614b56f-log-socket\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.596007 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/2cfb6b15-6de9-4109-a346-f3140614b56f-systemd-units\") pod \"ovnkube-node-7rjvb\" (UID: 
\"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.595999 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "2f79c130-fb71-4e1c-9e2d-ef492a0acb04" (UID: "2f79c130-fb71-4e1c-9e2d-ef492a0acb04"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.596031 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/2cfb6b15-6de9-4109-a346-f3140614b56f-host-cni-netd\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.596051 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/2cfb6b15-6de9-4109-a346-f3140614b56f-env-overrides\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.596061 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-host-slash" (OuterVolumeSpecName: "host-slash") pod "2f79c130-fb71-4e1c-9e2d-ef492a0acb04" (UID: "2f79c130-fb71-4e1c-9e2d-ef492a0acb04"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.596082 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "2f79c130-fb71-4e1c-9e2d-ef492a0acb04" (UID: "2f79c130-fb71-4e1c-9e2d-ef492a0acb04"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.596102 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "2f79c130-fb71-4e1c-9e2d-ef492a0acb04" (UID: "2f79c130-fb71-4e1c-9e2d-ef492a0acb04"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.596105 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/2cfb6b15-6de9-4109-a346-f3140614b56f-run-ovn\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.596138 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "2f79c130-fb71-4e1c-9e2d-ef492a0acb04" (UID: "2f79c130-fb71-4e1c-9e2d-ef492a0acb04"). InnerVolumeSpecName "host-cni-netd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.596174 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2cfb6b15-6de9-4109-a346-f3140614b56f-var-lib-openvswitch\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.596237 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2cfb6b15-6de9-4109-a346-f3140614b56f-ovn-node-metrics-cert\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.596267 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/2cfb6b15-6de9-4109-a346-f3140614b56f-host-slash\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.596289 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/2cfb6b15-6de9-4109-a346-f3140614b56f-host-kubelet\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.596307 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/2cfb6b15-6de9-4109-a346-f3140614b56f-host-run-netns\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.596325 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2cfb6b15-6de9-4109-a346-f3140614b56f-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.596347 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/2cfb6b15-6de9-4109-a346-f3140614b56f-ovnkube-config\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.596367 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5qtpf\" (UniqueName: \"kubernetes.io/projected/2cfb6b15-6de9-4109-a346-f3140614b56f-kube-api-access-5qtpf\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.596416 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: 
\"kubernetes.io/configmap/2cfb6b15-6de9-4109-a346-f3140614b56f-ovnkube-script-lib\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.596457 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/2cfb6b15-6de9-4109-a346-f3140614b56f-run-systemd\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.596540 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "2f79c130-fb71-4e1c-9e2d-ef492a0acb04" (UID: "2f79c130-fb71-4e1c-9e2d-ef492a0acb04"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.596560 4792 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.596576 4792 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.596575 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "2f79c130-fb71-4e1c-9e2d-ef492a0acb04" (UID: "2f79c130-fb71-4e1c-9e2d-ef492a0acb04"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.596564 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "2f79c130-fb71-4e1c-9e2d-ef492a0acb04" (UID: "2f79c130-fb71-4e1c-9e2d-ef492a0acb04"). InnerVolumeSpecName "host-run-netns". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.596601 4792 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.596782 4792 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-host-slash\") on node \"crc\" DevicePath \"\"" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.596810 4792 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.596825 4792 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.596838 4792 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-systemd-units\") on node \"crc\" DevicePath \"\"" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.596849 4792 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.596861 4792 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-log-socket\") on node \"crc\" DevicePath \"\"" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.596871 4792 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.596881 4792 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.596891 4792 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-node-log\") on node \"crc\" DevicePath \"\"" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.596937 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "2f79c130-fb71-4e1c-9e2d-ef492a0acb04" (UID: "2f79c130-fb71-4e1c-9e2d-ef492a0acb04"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.597023 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "2f79c130-fb71-4e1c-9e2d-ef492a0acb04" (UID: "2f79c130-fb71-4e1c-9e2d-ef492a0acb04"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.608728 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-kube-api-access-2vd5d" (OuterVolumeSpecName: "kube-api-access-2vd5d") pod "2f79c130-fb71-4e1c-9e2d-ef492a0acb04" (UID: "2f79c130-fb71-4e1c-9e2d-ef492a0acb04"). InnerVolumeSpecName "kube-api-access-2vd5d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.613253 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "2f79c130-fb71-4e1c-9e2d-ef492a0acb04" (UID: "2f79c130-fb71-4e1c-9e2d-ef492a0acb04"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.616091 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "2f79c130-fb71-4e1c-9e2d-ef492a0acb04" (UID: "2f79c130-fb71-4e1c-9e2d-ef492a0acb04"). InnerVolumeSpecName "ovn-node-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.697871 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/2cfb6b15-6de9-4109-a346-f3140614b56f-run-ovn\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.697924 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2cfb6b15-6de9-4109-a346-f3140614b56f-var-lib-openvswitch\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.697973 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2cfb6b15-6de9-4109-a346-f3140614b56f-ovn-node-metrics-cert\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.697992 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/2cfb6b15-6de9-4109-a346-f3140614b56f-host-slash\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.697991 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/2cfb6b15-6de9-4109-a346-f3140614b56f-run-ovn\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.698070 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/2cfb6b15-6de9-4109-a346-f3140614b56f-host-kubelet\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.698013 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/2cfb6b15-6de9-4109-a346-f3140614b56f-host-kubelet\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.698123 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/2cfb6b15-6de9-4109-a346-f3140614b56f-host-run-netns\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.698145 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2cfb6b15-6de9-4109-a346-f3140614b56f-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc 
kubenswrapper[4792]: I1202 18:48:12.698176 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/2cfb6b15-6de9-4109-a346-f3140614b56f-ovnkube-config\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.698208 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5qtpf\" (UniqueName: \"kubernetes.io/projected/2cfb6b15-6de9-4109-a346-f3140614b56f-kube-api-access-5qtpf\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.698269 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/2cfb6b15-6de9-4109-a346-f3140614b56f-ovnkube-script-lib\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.698299 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/2cfb6b15-6de9-4109-a346-f3140614b56f-run-systemd\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.698323 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2cfb6b15-6de9-4109-a346-f3140614b56f-run-openvswitch\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.698359 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2cfb6b15-6de9-4109-a346-f3140614b56f-etc-openvswitch\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.698389 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2cfb6b15-6de9-4109-a346-f3140614b56f-host-run-ovn-kubernetes\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.698431 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/2cfb6b15-6de9-4109-a346-f3140614b56f-host-cni-bin\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.698548 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/2cfb6b15-6de9-4109-a346-f3140614b56f-log-socket\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.698581 4792 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/2cfb6b15-6de9-4109-a346-f3140614b56f-node-log\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.698632 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/2cfb6b15-6de9-4109-a346-f3140614b56f-systemd-units\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.698665 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/2cfb6b15-6de9-4109-a346-f3140614b56f-host-cni-netd\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.698687 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/2cfb6b15-6de9-4109-a346-f3140614b56f-env-overrides\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.698782 4792 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.698801 4792 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.698812 4792 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.698824 4792 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.698835 4792 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.698850 4792 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.698861 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2vd5d\" (UniqueName: \"kubernetes.io/projected/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-kube-api-access-2vd5d\") on node \"crc\" DevicePath \"\"" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.698875 4792 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: 
\"kubernetes.io/host-path/2f79c130-fb71-4e1c-9e2d-ef492a0acb04-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.698960 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2cfb6b15-6de9-4109-a346-f3140614b56f-var-lib-openvswitch\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.699024 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2cfb6b15-6de9-4109-a346-f3140614b56f-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.699062 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/2cfb6b15-6de9-4109-a346-f3140614b56f-host-run-netns\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.699203 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/2cfb6b15-6de9-4109-a346-f3140614b56f-ovnkube-script-lib\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.699237 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/2cfb6b15-6de9-4109-a346-f3140614b56f-host-cni-bin\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.699293 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/2cfb6b15-6de9-4109-a346-f3140614b56f-run-systemd\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.699327 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2cfb6b15-6de9-4109-a346-f3140614b56f-run-openvswitch\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.699368 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2cfb6b15-6de9-4109-a346-f3140614b56f-etc-openvswitch\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.699406 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2cfb6b15-6de9-4109-a346-f3140614b56f-host-run-ovn-kubernetes\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.699438 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/2cfb6b15-6de9-4109-a346-f3140614b56f-env-overrides\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.699441 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/2cfb6b15-6de9-4109-a346-f3140614b56f-host-slash\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.699503 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/2cfb6b15-6de9-4109-a346-f3140614b56f-log-socket\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.699510 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/2cfb6b15-6de9-4109-a346-f3140614b56f-systemd-units\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.699469 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/2cfb6b15-6de9-4109-a346-f3140614b56f-node-log\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.699553 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/2cfb6b15-6de9-4109-a346-f3140614b56f-host-cni-netd\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.700133 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/2cfb6b15-6de9-4109-a346-f3140614b56f-ovnkube-config\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.705453 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2cfb6b15-6de9-4109-a346-f3140614b56f-ovn-node-metrics-cert\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.719023 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5qtpf\" (UniqueName: \"kubernetes.io/projected/2cfb6b15-6de9-4109-a346-f3140614b56f-kube-api-access-5qtpf\") pod \"ovnkube-node-7rjvb\" (UID: \"2cfb6b15-6de9-4109-a346-f3140614b56f\") " pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.720169 4792 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4jhb5_2f79c130-fb71-4e1c-9e2d-ef492a0acb04/ovnkube-controller/3.log" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.722150 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4jhb5_2f79c130-fb71-4e1c-9e2d-ef492a0acb04/ovn-acl-logging/0.log" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.722659 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4jhb5_2f79c130-fb71-4e1c-9e2d-ef492a0acb04/ovn-controller/0.log" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.723098 4792 generic.go:334] "Generic (PLEG): container finished" podID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerID="1f4b2f205261a5f63018ceba56a93274528de16d5903727bc5023cceae7a5132" exitCode=0 Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.723295 4792 generic.go:334] "Generic (PLEG): container finished" podID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerID="8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d" exitCode=0 Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.723317 4792 generic.go:334] "Generic (PLEG): container finished" podID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerID="04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd" exitCode=0 Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.723328 4792 generic.go:334] "Generic (PLEG): container finished" podID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerID="8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b" exitCode=0 Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.723337 4792 generic.go:334] "Generic (PLEG): container finished" podID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerID="b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159" exitCode=0 Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.723345 4792 generic.go:334] "Generic (PLEG): container finished" podID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" containerID="38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab" exitCode=143 Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.723202 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" event={"ID":"2f79c130-fb71-4e1c-9e2d-ef492a0acb04","Type":"ContainerDied","Data":"1f4b2f205261a5f63018ceba56a93274528de16d5903727bc5023cceae7a5132"} Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.723428 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" event={"ID":"2f79c130-fb71-4e1c-9e2d-ef492a0acb04","Type":"ContainerDied","Data":"8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d"} Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.723448 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" event={"ID":"2f79c130-fb71-4e1c-9e2d-ef492a0acb04","Type":"ContainerDied","Data":"04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd"} Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.723461 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" event={"ID":"2f79c130-fb71-4e1c-9e2d-ef492a0acb04","Type":"ContainerDied","Data":"8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b"} Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.723474 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" event={"ID":"2f79c130-fb71-4e1c-9e2d-ef492a0acb04","Type":"ContainerDied","Data":"b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159"} Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.723487 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" event={"ID":"2f79c130-fb71-4e1c-9e2d-ef492a0acb04","Type":"ContainerDied","Data":"38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab"} Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.723502 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c149ca0a128d1e33a8df95c177c80ba3a8dbb2caf877124ba7e10ff195808ad3"} Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.723534 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d"} Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.723543 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd"} Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.723551 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b"} Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.723561 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b"} Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.723568 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159"} Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.723574 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c"} Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.723580 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab"} Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.723587 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82"} Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.723597 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" event={"ID":"2f79c130-fb71-4e1c-9e2d-ef492a0acb04","Type":"ContainerDied","Data":"78c44b30e29d8ceb2584886c61dcbcdeeffb7ebb62ae2123f27d88326b5bf322"} Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.723608 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1f4b2f205261a5f63018ceba56a93274528de16d5903727bc5023cceae7a5132"} Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.723615 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"c149ca0a128d1e33a8df95c177c80ba3a8dbb2caf877124ba7e10ff195808ad3"} Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.723622 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d"} Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.723629 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd"} Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.723636 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b"} Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.723644 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b"} Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.723652 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159"} Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.723659 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c"} Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.723665 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab"} Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.723672 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82"} Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.723687 4792 scope.go:117] "RemoveContainer" containerID="1f4b2f205261a5f63018ceba56a93274528de16d5903727bc5023cceae7a5132" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.723266 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-4jhb5" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.726499 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-dw25w_6925e194-2dc8-4a3a-aa76-8db41ff27997/kube-multus/2.log" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.727180 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-dw25w_6925e194-2dc8-4a3a-aa76-8db41ff27997/kube-multus/1.log" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.727212 4792 generic.go:334] "Generic (PLEG): container finished" podID="6925e194-2dc8-4a3a-aa76-8db41ff27997" containerID="585fe554fcc83a4fa1c4bb6351183665390e1742e0bd51b90f2345444377b8c3" exitCode=2 Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.727230 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-dw25w" event={"ID":"6925e194-2dc8-4a3a-aa76-8db41ff27997","Type":"ContainerDied","Data":"585fe554fcc83a4fa1c4bb6351183665390e1742e0bd51b90f2345444377b8c3"} Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.727244 4792 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"58258d39cb6d0616429a94e2f3542d2250c8e8a89d393cf5667aa0b877d03797"} Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.727589 4792 scope.go:117] "RemoveContainer" containerID="585fe554fcc83a4fa1c4bb6351183665390e1742e0bd51b90f2345444377b8c3" Dec 02 18:48:12 crc kubenswrapper[4792]: E1202 18:48:12.727759 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-dw25w_openshift-multus(6925e194-2dc8-4a3a-aa76-8db41ff27997)\"" pod="openshift-multus/multus-dw25w" podUID="6925e194-2dc8-4a3a-aa76-8db41ff27997" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.746174 4792 scope.go:117] "RemoveContainer" containerID="c149ca0a128d1e33a8df95c177c80ba3a8dbb2caf877124ba7e10ff195808ad3" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.775782 4792 scope.go:117] "RemoveContainer" containerID="8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.795944 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.803588 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-4jhb5"] Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.803871 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-4jhb5"] Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.812768 4792 scope.go:117] "RemoveContainer" containerID="04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.864716 4792 scope.go:117] "RemoveContainer" containerID="8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.880400 4792 scope.go:117] "RemoveContainer" containerID="4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.901094 4792 scope.go:117] "RemoveContainer" containerID="b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.926000 4792 scope.go:117] "RemoveContainer" containerID="dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.939055 4792 scope.go:117] "RemoveContainer" containerID="38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.956757 4792 scope.go:117] "RemoveContainer" containerID="e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.983155 4792 scope.go:117] "RemoveContainer" containerID="1f4b2f205261a5f63018ceba56a93274528de16d5903727bc5023cceae7a5132" Dec 02 18:48:12 crc kubenswrapper[4792]: E1202 18:48:12.985849 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f4b2f205261a5f63018ceba56a93274528de16d5903727bc5023cceae7a5132\": container with ID starting with 1f4b2f205261a5f63018ceba56a93274528de16d5903727bc5023cceae7a5132 not found: ID does not exist" containerID="1f4b2f205261a5f63018ceba56a93274528de16d5903727bc5023cceae7a5132" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.985906 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f4b2f205261a5f63018ceba56a93274528de16d5903727bc5023cceae7a5132"} err="failed to get container status \"1f4b2f205261a5f63018ceba56a93274528de16d5903727bc5023cceae7a5132\": rpc error: code = NotFound desc = could not find container \"1f4b2f205261a5f63018ceba56a93274528de16d5903727bc5023cceae7a5132\": container with ID starting with 1f4b2f205261a5f63018ceba56a93274528de16d5903727bc5023cceae7a5132 not found: ID does not exist" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.985940 4792 scope.go:117] "RemoveContainer" containerID="c149ca0a128d1e33a8df95c177c80ba3a8dbb2caf877124ba7e10ff195808ad3" Dec 02 18:48:12 crc kubenswrapper[4792]: E1202 18:48:12.986628 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c149ca0a128d1e33a8df95c177c80ba3a8dbb2caf877124ba7e10ff195808ad3\": container with ID starting with c149ca0a128d1e33a8df95c177c80ba3a8dbb2caf877124ba7e10ff195808ad3 not found: ID does not exist" containerID="c149ca0a128d1e33a8df95c177c80ba3a8dbb2caf877124ba7e10ff195808ad3" Dec 02 
18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.986662 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c149ca0a128d1e33a8df95c177c80ba3a8dbb2caf877124ba7e10ff195808ad3"} err="failed to get container status \"c149ca0a128d1e33a8df95c177c80ba3a8dbb2caf877124ba7e10ff195808ad3\": rpc error: code = NotFound desc = could not find container \"c149ca0a128d1e33a8df95c177c80ba3a8dbb2caf877124ba7e10ff195808ad3\": container with ID starting with c149ca0a128d1e33a8df95c177c80ba3a8dbb2caf877124ba7e10ff195808ad3 not found: ID does not exist" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.986684 4792 scope.go:117] "RemoveContainer" containerID="8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d" Dec 02 18:48:12 crc kubenswrapper[4792]: E1202 18:48:12.990917 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d\": container with ID starting with 8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d not found: ID does not exist" containerID="8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.990958 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d"} err="failed to get container status \"8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d\": rpc error: code = NotFound desc = could not find container \"8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d\": container with ID starting with 8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d not found: ID does not exist" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.990979 4792 scope.go:117] "RemoveContainer" containerID="04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd" Dec 02 18:48:12 crc kubenswrapper[4792]: E1202 18:48:12.993142 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd\": container with ID starting with 04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd not found: ID does not exist" containerID="04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.993165 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd"} err="failed to get container status \"04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd\": rpc error: code = NotFound desc = could not find container \"04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd\": container with ID starting with 04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd not found: ID does not exist" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.993181 4792 scope.go:117] "RemoveContainer" containerID="8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b" Dec 02 18:48:12 crc kubenswrapper[4792]: E1202 18:48:12.993408 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b\": container with ID starting with 
8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b not found: ID does not exist" containerID="8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.993427 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b"} err="failed to get container status \"8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b\": rpc error: code = NotFound desc = could not find container \"8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b\": container with ID starting with 8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b not found: ID does not exist" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.993439 4792 scope.go:117] "RemoveContainer" containerID="4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b" Dec 02 18:48:12 crc kubenswrapper[4792]: E1202 18:48:12.993657 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b\": container with ID starting with 4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b not found: ID does not exist" containerID="4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.993677 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b"} err="failed to get container status \"4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b\": rpc error: code = NotFound desc = could not find container \"4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b\": container with ID starting with 4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b not found: ID does not exist" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.993694 4792 scope.go:117] "RemoveContainer" containerID="b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159" Dec 02 18:48:12 crc kubenswrapper[4792]: E1202 18:48:12.993892 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159\": container with ID starting with b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159 not found: ID does not exist" containerID="b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.993908 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159"} err="failed to get container status \"b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159\": rpc error: code = NotFound desc = could not find container \"b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159\": container with ID starting with b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159 not found: ID does not exist" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.993924 4792 scope.go:117] "RemoveContainer" containerID="dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c" Dec 02 18:48:12 crc kubenswrapper[4792]: E1202 18:48:12.994118 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc 
error: code = NotFound desc = could not find container \"dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c\": container with ID starting with dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c not found: ID does not exist" containerID="dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.994136 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c"} err="failed to get container status \"dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c\": rpc error: code = NotFound desc = could not find container \"dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c\": container with ID starting with dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c not found: ID does not exist" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.994150 4792 scope.go:117] "RemoveContainer" containerID="38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab" Dec 02 18:48:12 crc kubenswrapper[4792]: E1202 18:48:12.994323 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab\": container with ID starting with 38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab not found: ID does not exist" containerID="38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.994341 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab"} err="failed to get container status \"38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab\": rpc error: code = NotFound desc = could not find container \"38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab\": container with ID starting with 38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab not found: ID does not exist" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.994355 4792 scope.go:117] "RemoveContainer" containerID="e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82" Dec 02 18:48:12 crc kubenswrapper[4792]: E1202 18:48:12.994580 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\": container with ID starting with e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82 not found: ID does not exist" containerID="e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.994599 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82"} err="failed to get container status \"e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\": rpc error: code = NotFound desc = could not find container \"e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\": container with ID starting with e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82 not found: ID does not exist" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.994611 4792 scope.go:117] "RemoveContainer" 
containerID="1f4b2f205261a5f63018ceba56a93274528de16d5903727bc5023cceae7a5132" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.994790 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f4b2f205261a5f63018ceba56a93274528de16d5903727bc5023cceae7a5132"} err="failed to get container status \"1f4b2f205261a5f63018ceba56a93274528de16d5903727bc5023cceae7a5132\": rpc error: code = NotFound desc = could not find container \"1f4b2f205261a5f63018ceba56a93274528de16d5903727bc5023cceae7a5132\": container with ID starting with 1f4b2f205261a5f63018ceba56a93274528de16d5903727bc5023cceae7a5132 not found: ID does not exist" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.994808 4792 scope.go:117] "RemoveContainer" containerID="c149ca0a128d1e33a8df95c177c80ba3a8dbb2caf877124ba7e10ff195808ad3" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.995122 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c149ca0a128d1e33a8df95c177c80ba3a8dbb2caf877124ba7e10ff195808ad3"} err="failed to get container status \"c149ca0a128d1e33a8df95c177c80ba3a8dbb2caf877124ba7e10ff195808ad3\": rpc error: code = NotFound desc = could not find container \"c149ca0a128d1e33a8df95c177c80ba3a8dbb2caf877124ba7e10ff195808ad3\": container with ID starting with c149ca0a128d1e33a8df95c177c80ba3a8dbb2caf877124ba7e10ff195808ad3 not found: ID does not exist" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.995151 4792 scope.go:117] "RemoveContainer" containerID="8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.995380 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d"} err="failed to get container status \"8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d\": rpc error: code = NotFound desc = could not find container \"8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d\": container with ID starting with 8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d not found: ID does not exist" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.995409 4792 scope.go:117] "RemoveContainer" containerID="04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.995703 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd"} err="failed to get container status \"04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd\": rpc error: code = NotFound desc = could not find container \"04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd\": container with ID starting with 04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd not found: ID does not exist" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.995721 4792 scope.go:117] "RemoveContainer" containerID="8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.995905 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b"} err="failed to get container status \"8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b\": rpc error: code = NotFound desc = could not find 
container \"8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b\": container with ID starting with 8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b not found: ID does not exist" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.995922 4792 scope.go:117] "RemoveContainer" containerID="4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.996095 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b"} err="failed to get container status \"4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b\": rpc error: code = NotFound desc = could not find container \"4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b\": container with ID starting with 4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b not found: ID does not exist" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.996112 4792 scope.go:117] "RemoveContainer" containerID="b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.996272 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159"} err="failed to get container status \"b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159\": rpc error: code = NotFound desc = could not find container \"b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159\": container with ID starting with b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159 not found: ID does not exist" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.996289 4792 scope.go:117] "RemoveContainer" containerID="dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.996547 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c"} err="failed to get container status \"dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c\": rpc error: code = NotFound desc = could not find container \"dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c\": container with ID starting with dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c not found: ID does not exist" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.996566 4792 scope.go:117] "RemoveContainer" containerID="38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.996789 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab"} err="failed to get container status \"38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab\": rpc error: code = NotFound desc = could not find container \"38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab\": container with ID starting with 38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab not found: ID does not exist" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.996807 4792 scope.go:117] "RemoveContainer" containerID="e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.997007 4792 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82"} err="failed to get container status \"e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\": rpc error: code = NotFound desc = could not find container \"e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\": container with ID starting with e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82 not found: ID does not exist" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.997023 4792 scope.go:117] "RemoveContainer" containerID="1f4b2f205261a5f63018ceba56a93274528de16d5903727bc5023cceae7a5132" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.997235 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f4b2f205261a5f63018ceba56a93274528de16d5903727bc5023cceae7a5132"} err="failed to get container status \"1f4b2f205261a5f63018ceba56a93274528de16d5903727bc5023cceae7a5132\": rpc error: code = NotFound desc = could not find container \"1f4b2f205261a5f63018ceba56a93274528de16d5903727bc5023cceae7a5132\": container with ID starting with 1f4b2f205261a5f63018ceba56a93274528de16d5903727bc5023cceae7a5132 not found: ID does not exist" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.997253 4792 scope.go:117] "RemoveContainer" containerID="c149ca0a128d1e33a8df95c177c80ba3a8dbb2caf877124ba7e10ff195808ad3" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.997482 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c149ca0a128d1e33a8df95c177c80ba3a8dbb2caf877124ba7e10ff195808ad3"} err="failed to get container status \"c149ca0a128d1e33a8df95c177c80ba3a8dbb2caf877124ba7e10ff195808ad3\": rpc error: code = NotFound desc = could not find container \"c149ca0a128d1e33a8df95c177c80ba3a8dbb2caf877124ba7e10ff195808ad3\": container with ID starting with c149ca0a128d1e33a8df95c177c80ba3a8dbb2caf877124ba7e10ff195808ad3 not found: ID does not exist" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.997501 4792 scope.go:117] "RemoveContainer" containerID="8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.997734 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d"} err="failed to get container status \"8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d\": rpc error: code = NotFound desc = could not find container \"8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d\": container with ID starting with 8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d not found: ID does not exist" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.997751 4792 scope.go:117] "RemoveContainer" containerID="04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.997941 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd"} err="failed to get container status \"04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd\": rpc error: code = NotFound desc = could not find container \"04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd\": container with ID starting with 
04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd not found: ID does not exist" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.997959 4792 scope.go:117] "RemoveContainer" containerID="8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.998186 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b"} err="failed to get container status \"8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b\": rpc error: code = NotFound desc = could not find container \"8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b\": container with ID starting with 8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b not found: ID does not exist" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.998204 4792 scope.go:117] "RemoveContainer" containerID="4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.998383 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b"} err="failed to get container status \"4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b\": rpc error: code = NotFound desc = could not find container \"4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b\": container with ID starting with 4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b not found: ID does not exist" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.998406 4792 scope.go:117] "RemoveContainer" containerID="b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.998601 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159"} err="failed to get container status \"b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159\": rpc error: code = NotFound desc = could not find container \"b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159\": container with ID starting with b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159 not found: ID does not exist" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.998618 4792 scope.go:117] "RemoveContainer" containerID="dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.998781 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c"} err="failed to get container status \"dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c\": rpc error: code = NotFound desc = could not find container \"dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c\": container with ID starting with dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c not found: ID does not exist" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.998796 4792 scope.go:117] "RemoveContainer" containerID="38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.998985 4792 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab"} err="failed to get container status \"38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab\": rpc error: code = NotFound desc = could not find container \"38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab\": container with ID starting with 38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab not found: ID does not exist" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.999004 4792 scope.go:117] "RemoveContainer" containerID="e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.999196 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82"} err="failed to get container status \"e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\": rpc error: code = NotFound desc = could not find container \"e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\": container with ID starting with e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82 not found: ID does not exist" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.999212 4792 scope.go:117] "RemoveContainer" containerID="1f4b2f205261a5f63018ceba56a93274528de16d5903727bc5023cceae7a5132" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.999370 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f4b2f205261a5f63018ceba56a93274528de16d5903727bc5023cceae7a5132"} err="failed to get container status \"1f4b2f205261a5f63018ceba56a93274528de16d5903727bc5023cceae7a5132\": rpc error: code = NotFound desc = could not find container \"1f4b2f205261a5f63018ceba56a93274528de16d5903727bc5023cceae7a5132\": container with ID starting with 1f4b2f205261a5f63018ceba56a93274528de16d5903727bc5023cceae7a5132 not found: ID does not exist" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.999388 4792 scope.go:117] "RemoveContainer" containerID="c149ca0a128d1e33a8df95c177c80ba3a8dbb2caf877124ba7e10ff195808ad3" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.999604 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c149ca0a128d1e33a8df95c177c80ba3a8dbb2caf877124ba7e10ff195808ad3"} err="failed to get container status \"c149ca0a128d1e33a8df95c177c80ba3a8dbb2caf877124ba7e10ff195808ad3\": rpc error: code = NotFound desc = could not find container \"c149ca0a128d1e33a8df95c177c80ba3a8dbb2caf877124ba7e10ff195808ad3\": container with ID starting with c149ca0a128d1e33a8df95c177c80ba3a8dbb2caf877124ba7e10ff195808ad3 not found: ID does not exist" Dec 02 18:48:12 crc kubenswrapper[4792]: I1202 18:48:12.999621 4792 scope.go:117] "RemoveContainer" containerID="8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d" Dec 02 18:48:13 crc kubenswrapper[4792]: I1202 18:48:13.000652 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d"} err="failed to get container status \"8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d\": rpc error: code = NotFound desc = could not find container \"8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d\": container with ID starting with 8fdecb57a331e2dd4e96098ac33aa4049b495c7ff47ab0e28ac129ce5b64111d not found: ID does not exist" Dec 
02 18:48:13 crc kubenswrapper[4792]: I1202 18:48:13.000674 4792 scope.go:117] "RemoveContainer" containerID="04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd" Dec 02 18:48:13 crc kubenswrapper[4792]: I1202 18:48:13.000866 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd"} err="failed to get container status \"04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd\": rpc error: code = NotFound desc = could not find container \"04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd\": container with ID starting with 04020010567f8dc6a1924f2aa5b0a261e83810e0639500aea5a570826c99efdd not found: ID does not exist" Dec 02 18:48:13 crc kubenswrapper[4792]: I1202 18:48:13.000885 4792 scope.go:117] "RemoveContainer" containerID="8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b" Dec 02 18:48:13 crc kubenswrapper[4792]: I1202 18:48:13.001071 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b"} err="failed to get container status \"8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b\": rpc error: code = NotFound desc = could not find container \"8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b\": container with ID starting with 8a9426245cc7241e6364dd15869c54b7edb12238f5cf77704a83e69b49ebe93b not found: ID does not exist" Dec 02 18:48:13 crc kubenswrapper[4792]: I1202 18:48:13.001089 4792 scope.go:117] "RemoveContainer" containerID="4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b" Dec 02 18:48:13 crc kubenswrapper[4792]: I1202 18:48:13.001253 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b"} err="failed to get container status \"4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b\": rpc error: code = NotFound desc = could not find container \"4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b\": container with ID starting with 4d5c6d711c71c4906dba0a58b255283c7d3fb5e203e66e86e95efca3c3411b7b not found: ID does not exist" Dec 02 18:48:13 crc kubenswrapper[4792]: I1202 18:48:13.001269 4792 scope.go:117] "RemoveContainer" containerID="b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159" Dec 02 18:48:13 crc kubenswrapper[4792]: I1202 18:48:13.001431 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159"} err="failed to get container status \"b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159\": rpc error: code = NotFound desc = could not find container \"b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159\": container with ID starting with b5e24b27f2794f3dcb05c2d99164e7e69782dff170169db33837d1bd609d9159 not found: ID does not exist" Dec 02 18:48:13 crc kubenswrapper[4792]: I1202 18:48:13.001447 4792 scope.go:117] "RemoveContainer" containerID="dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c" Dec 02 18:48:13 crc kubenswrapper[4792]: I1202 18:48:13.001679 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c"} err="failed to get container status 
\"dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c\": rpc error: code = NotFound desc = could not find container \"dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c\": container with ID starting with dcd834333586748c2401b9cfcc68c1ac24f32c029b8e2d0ade8fae33af99802c not found: ID does not exist" Dec 02 18:48:13 crc kubenswrapper[4792]: I1202 18:48:13.001696 4792 scope.go:117] "RemoveContainer" containerID="38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab" Dec 02 18:48:13 crc kubenswrapper[4792]: I1202 18:48:13.002337 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab"} err="failed to get container status \"38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab\": rpc error: code = NotFound desc = could not find container \"38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab\": container with ID starting with 38bb5c6a4dab638701752c1c7c5a25cbe198d89e186f3150e757961b033e62ab not found: ID does not exist" Dec 02 18:48:13 crc kubenswrapper[4792]: I1202 18:48:13.002354 4792 scope.go:117] "RemoveContainer" containerID="e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82" Dec 02 18:48:13 crc kubenswrapper[4792]: I1202 18:48:13.002563 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82"} err="failed to get container status \"e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\": rpc error: code = NotFound desc = could not find container \"e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82\": container with ID starting with e4b3acb05c6f215d2b68a4840bf4bff2557973246a301c79d27dfe118c874b82 not found: ID does not exist" Dec 02 18:48:13 crc kubenswrapper[4792]: I1202 18:48:13.002581 4792 scope.go:117] "RemoveContainer" containerID="1f4b2f205261a5f63018ceba56a93274528de16d5903727bc5023cceae7a5132" Dec 02 18:48:13 crc kubenswrapper[4792]: I1202 18:48:13.002767 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f4b2f205261a5f63018ceba56a93274528de16d5903727bc5023cceae7a5132"} err="failed to get container status \"1f4b2f205261a5f63018ceba56a93274528de16d5903727bc5023cceae7a5132\": rpc error: code = NotFound desc = could not find container \"1f4b2f205261a5f63018ceba56a93274528de16d5903727bc5023cceae7a5132\": container with ID starting with 1f4b2f205261a5f63018ceba56a93274528de16d5903727bc5023cceae7a5132 not found: ID does not exist" Dec 02 18:48:13 crc kubenswrapper[4792]: I1202 18:48:13.546904 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f79c130-fb71-4e1c-9e2d-ef492a0acb04" path="/var/lib/kubelet/pods/2f79c130-fb71-4e1c-9e2d-ef492a0acb04/volumes" Dec 02 18:48:13 crc kubenswrapper[4792]: I1202 18:48:13.733684 4792 generic.go:334] "Generic (PLEG): container finished" podID="2cfb6b15-6de9-4109-a346-f3140614b56f" containerID="f39246fd23e206726dce2364f86bf4a08f3af9c00859d37df48f5604311ec9e0" exitCode=0 Dec 02 18:48:13 crc kubenswrapper[4792]: I1202 18:48:13.733763 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" event={"ID":"2cfb6b15-6de9-4109-a346-f3140614b56f","Type":"ContainerDied","Data":"f39246fd23e206726dce2364f86bf4a08f3af9c00859d37df48f5604311ec9e0"} Dec 02 18:48:13 crc kubenswrapper[4792]: I1202 18:48:13.733793 4792 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" event={"ID":"2cfb6b15-6de9-4109-a346-f3140614b56f","Type":"ContainerStarted","Data":"374f0d0c4bbf3c9401913aa78d2b996419b7209d3a0dca42d59d20802d78bd89"} Dec 02 18:48:14 crc kubenswrapper[4792]: I1202 18:48:14.747657 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" event={"ID":"2cfb6b15-6de9-4109-a346-f3140614b56f","Type":"ContainerStarted","Data":"93c49414491a45d6ffc99f0ccb3905fb4f3ad769b3655195b3104ec2f811f14d"} Dec 02 18:48:14 crc kubenswrapper[4792]: I1202 18:48:14.748092 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" event={"ID":"2cfb6b15-6de9-4109-a346-f3140614b56f","Type":"ContainerStarted","Data":"09fab77940c428024e7b309a402ba8ec9db8769a9d01c2f201ade4c344b23789"} Dec 02 18:48:14 crc kubenswrapper[4792]: I1202 18:48:14.748107 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" event={"ID":"2cfb6b15-6de9-4109-a346-f3140614b56f","Type":"ContainerStarted","Data":"b1bcbf357ecfa5fb7dab26bc31d878f6241b53980d61a731f6c3a78603b9466c"} Dec 02 18:48:14 crc kubenswrapper[4792]: I1202 18:48:14.748118 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" event={"ID":"2cfb6b15-6de9-4109-a346-f3140614b56f","Type":"ContainerStarted","Data":"20fe9c4ceab71963545ba9e7a88c710fcd37900d11f95e6ca2e1c12af10ae4b5"} Dec 02 18:48:14 crc kubenswrapper[4792]: I1202 18:48:14.748128 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" event={"ID":"2cfb6b15-6de9-4109-a346-f3140614b56f","Type":"ContainerStarted","Data":"4594b9fb2815399827e91535b19ec0c33d8310063857ae1b98fe4107f957259f"} Dec 02 18:48:15 crc kubenswrapper[4792]: I1202 18:48:15.769679 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" event={"ID":"2cfb6b15-6de9-4109-a346-f3140614b56f","Type":"ContainerStarted","Data":"16eb937bf1b424dea5f3d8f0c73aef2158a34958091a96fa47b70e4440539a43"} Dec 02 18:48:17 crc kubenswrapper[4792]: I1202 18:48:17.785741 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" event={"ID":"2cfb6b15-6de9-4109-a346-f3140614b56f","Type":"ContainerStarted","Data":"18ef2d57a53c26e409ce4fd593453706a882eeb88a3ece39080bf92809c3d0f5"} Dec 02 18:48:18 crc kubenswrapper[4792]: I1202 18:48:18.639982 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-5vh6b"] Dec 02 18:48:18 crc kubenswrapper[4792]: I1202 18:48:18.640735 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5vh6b" Dec 02 18:48:18 crc kubenswrapper[4792]: I1202 18:48:18.643010 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Dec 02 18:48:18 crc kubenswrapper[4792]: I1202 18:48:18.643334 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Dec 02 18:48:18 crc kubenswrapper[4792]: I1202 18:48:18.643769 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-clbmq" Dec 02 18:48:18 crc kubenswrapper[4792]: I1202 18:48:18.740256 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvf98\" (UniqueName: \"kubernetes.io/projected/9b8eb6d9-8320-401e-8092-5333c1772c4e-kube-api-access-pvf98\") pod \"obo-prometheus-operator-668cf9dfbb-5vh6b\" (UID: \"9b8eb6d9-8320-401e-8092-5333c1772c4e\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5vh6b" Dec 02 18:48:18 crc kubenswrapper[4792]: I1202 18:48:18.768324 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9"] Dec 02 18:48:18 crc kubenswrapper[4792]: I1202 18:48:18.769271 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9" Dec 02 18:48:18 crc kubenswrapper[4792]: I1202 18:48:18.771222 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-w7n2k" Dec 02 18:48:18 crc kubenswrapper[4792]: I1202 18:48:18.771578 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Dec 02 18:48:18 crc kubenswrapper[4792]: I1202 18:48:18.773728 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d"] Dec 02 18:48:18 crc kubenswrapper[4792]: I1202 18:48:18.774418 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d" Dec 02 18:48:18 crc kubenswrapper[4792]: I1202 18:48:18.841478 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pvf98\" (UniqueName: \"kubernetes.io/projected/9b8eb6d9-8320-401e-8092-5333c1772c4e-kube-api-access-pvf98\") pod \"obo-prometheus-operator-668cf9dfbb-5vh6b\" (UID: \"9b8eb6d9-8320-401e-8092-5333c1772c4e\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5vh6b" Dec 02 18:48:18 crc kubenswrapper[4792]: I1202 18:48:18.841582 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/aeed3219-8084-40fd-888a-1e4bc4dd3179-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9\" (UID: \"aeed3219-8084-40fd-888a-1e4bc4dd3179\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9" Dec 02 18:48:18 crc kubenswrapper[4792]: I1202 18:48:18.841646 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d4a8cfbd-83cc-47ee-abe2-f48802bb58e4-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d\" (UID: \"d4a8cfbd-83cc-47ee-abe2-f48802bb58e4\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d" Dec 02 18:48:18 crc kubenswrapper[4792]: I1202 18:48:18.841669 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d4a8cfbd-83cc-47ee-abe2-f48802bb58e4-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d\" (UID: \"d4a8cfbd-83cc-47ee-abe2-f48802bb58e4\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d" Dec 02 18:48:18 crc kubenswrapper[4792]: I1202 18:48:18.841897 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/aeed3219-8084-40fd-888a-1e4bc4dd3179-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9\" (UID: \"aeed3219-8084-40fd-888a-1e4bc4dd3179\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9" Dec 02 18:48:18 crc kubenswrapper[4792]: I1202 18:48:18.871428 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pvf98\" (UniqueName: \"kubernetes.io/projected/9b8eb6d9-8320-401e-8092-5333c1772c4e-kube-api-access-pvf98\") pod \"obo-prometheus-operator-668cf9dfbb-5vh6b\" (UID: \"9b8eb6d9-8320-401e-8092-5333c1772c4e\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5vh6b" Dec 02 18:48:18 crc kubenswrapper[4792]: I1202 18:48:18.942348 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/aeed3219-8084-40fd-888a-1e4bc4dd3179-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9\" (UID: \"aeed3219-8084-40fd-888a-1e4bc4dd3179\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9" Dec 02 18:48:18 crc kubenswrapper[4792]: I1202 18:48:18.942760 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: 
\"kubernetes.io/secret/d4a8cfbd-83cc-47ee-abe2-f48802bb58e4-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d\" (UID: \"d4a8cfbd-83cc-47ee-abe2-f48802bb58e4\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d" Dec 02 18:48:18 crc kubenswrapper[4792]: I1202 18:48:18.942867 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d4a8cfbd-83cc-47ee-abe2-f48802bb58e4-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d\" (UID: \"d4a8cfbd-83cc-47ee-abe2-f48802bb58e4\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d" Dec 02 18:48:18 crc kubenswrapper[4792]: I1202 18:48:18.942970 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/aeed3219-8084-40fd-888a-1e4bc4dd3179-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9\" (UID: \"aeed3219-8084-40fd-888a-1e4bc4dd3179\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9" Dec 02 18:48:18 crc kubenswrapper[4792]: I1202 18:48:18.946432 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/aeed3219-8084-40fd-888a-1e4bc4dd3179-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9\" (UID: \"aeed3219-8084-40fd-888a-1e4bc4dd3179\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9" Dec 02 18:48:18 crc kubenswrapper[4792]: I1202 18:48:18.948645 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/aeed3219-8084-40fd-888a-1e4bc4dd3179-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9\" (UID: \"aeed3219-8084-40fd-888a-1e4bc4dd3179\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9" Dec 02 18:48:18 crc kubenswrapper[4792]: I1202 18:48:18.949106 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d4a8cfbd-83cc-47ee-abe2-f48802bb58e4-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d\" (UID: \"d4a8cfbd-83cc-47ee-abe2-f48802bb58e4\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d" Dec 02 18:48:18 crc kubenswrapper[4792]: I1202 18:48:18.949202 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d4a8cfbd-83cc-47ee-abe2-f48802bb58e4-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d\" (UID: \"d4a8cfbd-83cc-47ee-abe2-f48802bb58e4\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d" Dec 02 18:48:18 crc kubenswrapper[4792]: I1202 18:48:18.958621 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5vh6b" Dec 02 18:48:18 crc kubenswrapper[4792]: I1202 18:48:18.982504 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-jg8gh"] Dec 02 18:48:18 crc kubenswrapper[4792]: I1202 18:48:18.983946 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-jg8gh" Dec 02 18:48:18 crc kubenswrapper[4792]: I1202 18:48:18.986053 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-nlq44" Dec 02 18:48:18 crc kubenswrapper[4792]: I1202 18:48:18.989588 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Dec 02 18:48:18 crc kubenswrapper[4792]: E1202 18:48:18.990677 4792 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-5vh6b_openshift-operators_9b8eb6d9-8320-401e-8092-5333c1772c4e_0(637f07d4721e2218588aeac501d6d0057a518ee15bfa94bfe70215f682f53ba2): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 02 18:48:18 crc kubenswrapper[4792]: E1202 18:48:18.990764 4792 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-5vh6b_openshift-operators_9b8eb6d9-8320-401e-8092-5333c1772c4e_0(637f07d4721e2218588aeac501d6d0057a518ee15bfa94bfe70215f682f53ba2): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5vh6b" Dec 02 18:48:18 crc kubenswrapper[4792]: E1202 18:48:18.990806 4792 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-5vh6b_openshift-operators_9b8eb6d9-8320-401e-8092-5333c1772c4e_0(637f07d4721e2218588aeac501d6d0057a518ee15bfa94bfe70215f682f53ba2): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5vh6b" Dec 02 18:48:18 crc kubenswrapper[4792]: E1202 18:48:18.990850 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-668cf9dfbb-5vh6b_openshift-operators(9b8eb6d9-8320-401e-8092-5333c1772c4e)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-668cf9dfbb-5vh6b_openshift-operators(9b8eb6d9-8320-401e-8092-5333c1772c4e)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-5vh6b_openshift-operators_9b8eb6d9-8320-401e-8092-5333c1772c4e_0(637f07d4721e2218588aeac501d6d0057a518ee15bfa94bfe70215f682f53ba2): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5vh6b" podUID="9b8eb6d9-8320-401e-8092-5333c1772c4e" Dec 02 18:48:19 crc kubenswrapper[4792]: I1202 18:48:19.043903 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v2zw6\" (UniqueName: \"kubernetes.io/projected/eba5ee28-f55c-4e22-b4f7-22899eb4fdb7-kube-api-access-v2zw6\") pod \"observability-operator-d8bb48f5d-jg8gh\" (UID: \"eba5ee28-f55c-4e22-b4f7-22899eb4fdb7\") " pod="openshift-operators/observability-operator-d8bb48f5d-jg8gh" Dec 02 18:48:19 crc kubenswrapper[4792]: I1202 18:48:19.043963 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/eba5ee28-f55c-4e22-b4f7-22899eb4fdb7-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-jg8gh\" (UID: \"eba5ee28-f55c-4e22-b4f7-22899eb4fdb7\") " pod="openshift-operators/observability-operator-d8bb48f5d-jg8gh" Dec 02 18:48:19 crc kubenswrapper[4792]: I1202 18:48:19.084867 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9" Dec 02 18:48:19 crc kubenswrapper[4792]: I1202 18:48:19.095322 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d" Dec 02 18:48:19 crc kubenswrapper[4792]: E1202 18:48:19.108847 4792 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9_openshift-operators_aeed3219-8084-40fd-888a-1e4bc4dd3179_0(d4739fdbe8b9052bda3d28b8be32c037cccbb89995103f3245c58b1e5b23a3b6): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 02 18:48:19 crc kubenswrapper[4792]: E1202 18:48:19.108937 4792 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9_openshift-operators_aeed3219-8084-40fd-888a-1e4bc4dd3179_0(d4739fdbe8b9052bda3d28b8be32c037cccbb89995103f3245c58b1e5b23a3b6): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9" Dec 02 18:48:19 crc kubenswrapper[4792]: E1202 18:48:19.108965 4792 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9_openshift-operators_aeed3219-8084-40fd-888a-1e4bc4dd3179_0(d4739fdbe8b9052bda3d28b8be32c037cccbb89995103f3245c58b1e5b23a3b6): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9" Dec 02 18:48:19 crc kubenswrapper[4792]: E1202 18:48:19.109027 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9_openshift-operators(aeed3219-8084-40fd-888a-1e4bc4dd3179)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9_openshift-operators(aeed3219-8084-40fd-888a-1e4bc4dd3179)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9_openshift-operators_aeed3219-8084-40fd-888a-1e4bc4dd3179_0(d4739fdbe8b9052bda3d28b8be32c037cccbb89995103f3245c58b1e5b23a3b6): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9" podUID="aeed3219-8084-40fd-888a-1e4bc4dd3179" Dec 02 18:48:19 crc kubenswrapper[4792]: E1202 18:48:19.120296 4792 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d_openshift-operators_d4a8cfbd-83cc-47ee-abe2-f48802bb58e4_0(5ff0b670065ab98b7e827fecad662f655389a2a18afac0ea053c35066e67eed9): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 02 18:48:19 crc kubenswrapper[4792]: E1202 18:48:19.120370 4792 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d_openshift-operators_d4a8cfbd-83cc-47ee-abe2-f48802bb58e4_0(5ff0b670065ab98b7e827fecad662f655389a2a18afac0ea053c35066e67eed9): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d" Dec 02 18:48:19 crc kubenswrapper[4792]: E1202 18:48:19.120392 4792 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d_openshift-operators_d4a8cfbd-83cc-47ee-abe2-f48802bb58e4_0(5ff0b670065ab98b7e827fecad662f655389a2a18afac0ea053c35066e67eed9): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d" Dec 02 18:48:19 crc kubenswrapper[4792]: E1202 18:48:19.120447 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d_openshift-operators(d4a8cfbd-83cc-47ee-abe2-f48802bb58e4)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d_openshift-operators(d4a8cfbd-83cc-47ee-abe2-f48802bb58e4)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d_openshift-operators_d4a8cfbd-83cc-47ee-abe2-f48802bb58e4_0(5ff0b670065ab98b7e827fecad662f655389a2a18afac0ea053c35066e67eed9): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d" podUID="d4a8cfbd-83cc-47ee-abe2-f48802bb58e4" Dec 02 18:48:19 crc kubenswrapper[4792]: I1202 18:48:19.145669 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v2zw6\" (UniqueName: \"kubernetes.io/projected/eba5ee28-f55c-4e22-b4f7-22899eb4fdb7-kube-api-access-v2zw6\") pod \"observability-operator-d8bb48f5d-jg8gh\" (UID: \"eba5ee28-f55c-4e22-b4f7-22899eb4fdb7\") " pod="openshift-operators/observability-operator-d8bb48f5d-jg8gh" Dec 02 18:48:19 crc kubenswrapper[4792]: I1202 18:48:19.146100 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/eba5ee28-f55c-4e22-b4f7-22899eb4fdb7-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-jg8gh\" (UID: \"eba5ee28-f55c-4e22-b4f7-22899eb4fdb7\") " pod="openshift-operators/observability-operator-d8bb48f5d-jg8gh" Dec 02 18:48:19 crc kubenswrapper[4792]: I1202 18:48:19.151136 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/eba5ee28-f55c-4e22-b4f7-22899eb4fdb7-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-jg8gh\" (UID: \"eba5ee28-f55c-4e22-b4f7-22899eb4fdb7\") " pod="openshift-operators/observability-operator-d8bb48f5d-jg8gh" Dec 02 18:48:19 crc kubenswrapper[4792]: I1202 18:48:19.168599 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-5446b9c989-nqbpz"] Dec 02 18:48:19 crc kubenswrapper[4792]: I1202 18:48:19.169414 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-nqbpz" Dec 02 18:48:19 crc kubenswrapper[4792]: I1202 18:48:19.172137 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v2zw6\" (UniqueName: \"kubernetes.io/projected/eba5ee28-f55c-4e22-b4f7-22899eb4fdb7-kube-api-access-v2zw6\") pod \"observability-operator-d8bb48f5d-jg8gh\" (UID: \"eba5ee28-f55c-4e22-b4f7-22899eb4fdb7\") " pod="openshift-operators/observability-operator-d8bb48f5d-jg8gh" Dec 02 18:48:19 crc kubenswrapper[4792]: I1202 18:48:19.172714 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-2992c" Dec 02 18:48:19 crc kubenswrapper[4792]: I1202 18:48:19.247027 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/d953db38-bd34-4d90-9c21-64ed4b3feaaf-openshift-service-ca\") pod \"perses-operator-5446b9c989-nqbpz\" (UID: \"d953db38-bd34-4d90-9c21-64ed4b3feaaf\") " pod="openshift-operators/perses-operator-5446b9c989-nqbpz" Dec 02 18:48:19 crc kubenswrapper[4792]: I1202 18:48:19.247245 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-df7b7\" (UniqueName: \"kubernetes.io/projected/d953db38-bd34-4d90-9c21-64ed4b3feaaf-kube-api-access-df7b7\") pod \"perses-operator-5446b9c989-nqbpz\" (UID: \"d953db38-bd34-4d90-9c21-64ed4b3feaaf\") " pod="openshift-operators/perses-operator-5446b9c989-nqbpz" Dec 02 18:48:19 crc kubenswrapper[4792]: I1202 18:48:19.332607 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-jg8gh" Dec 02 18:48:19 crc kubenswrapper[4792]: I1202 18:48:19.351489 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-df7b7\" (UniqueName: \"kubernetes.io/projected/d953db38-bd34-4d90-9c21-64ed4b3feaaf-kube-api-access-df7b7\") pod \"perses-operator-5446b9c989-nqbpz\" (UID: \"d953db38-bd34-4d90-9c21-64ed4b3feaaf\") " pod="openshift-operators/perses-operator-5446b9c989-nqbpz" Dec 02 18:48:19 crc kubenswrapper[4792]: I1202 18:48:19.351606 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/d953db38-bd34-4d90-9c21-64ed4b3feaaf-openshift-service-ca\") pod \"perses-operator-5446b9c989-nqbpz\" (UID: \"d953db38-bd34-4d90-9c21-64ed4b3feaaf\") " pod="openshift-operators/perses-operator-5446b9c989-nqbpz" Dec 02 18:48:19 crc kubenswrapper[4792]: I1202 18:48:19.352868 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/d953db38-bd34-4d90-9c21-64ed4b3feaaf-openshift-service-ca\") pod \"perses-operator-5446b9c989-nqbpz\" (UID: \"d953db38-bd34-4d90-9c21-64ed4b3feaaf\") " pod="openshift-operators/perses-operator-5446b9c989-nqbpz" Dec 02 18:48:19 crc kubenswrapper[4792]: E1202 18:48:19.358784 4792 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-jg8gh_openshift-operators_eba5ee28-f55c-4e22-b4f7-22899eb4fdb7_0(af0b974ace779295501919f12e0dfa549236b2165d268df01d1ba6a0e231d242): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 02 18:48:19 crc kubenswrapper[4792]: E1202 18:48:19.358897 4792 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-jg8gh_openshift-operators_eba5ee28-f55c-4e22-b4f7-22899eb4fdb7_0(af0b974ace779295501919f12e0dfa549236b2165d268df01d1ba6a0e231d242): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-d8bb48f5d-jg8gh" Dec 02 18:48:19 crc kubenswrapper[4792]: E1202 18:48:19.358936 4792 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-jg8gh_openshift-operators_eba5ee28-f55c-4e22-b4f7-22899eb4fdb7_0(af0b974ace779295501919f12e0dfa549236b2165d268df01d1ba6a0e231d242): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/observability-operator-d8bb48f5d-jg8gh" Dec 02 18:48:19 crc kubenswrapper[4792]: E1202 18:48:19.359010 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"observability-operator-d8bb48f5d-jg8gh_openshift-operators(eba5ee28-f55c-4e22-b4f7-22899eb4fdb7)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"observability-operator-d8bb48f5d-jg8gh_openshift-operators(eba5ee28-f55c-4e22-b4f7-22899eb4fdb7)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-jg8gh_openshift-operators_eba5ee28-f55c-4e22-b4f7-22899eb4fdb7_0(af0b974ace779295501919f12e0dfa549236b2165d268df01d1ba6a0e231d242): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/observability-operator-d8bb48f5d-jg8gh" podUID="eba5ee28-f55c-4e22-b4f7-22899eb4fdb7" Dec 02 18:48:19 crc kubenswrapper[4792]: I1202 18:48:19.373473 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-df7b7\" (UniqueName: \"kubernetes.io/projected/d953db38-bd34-4d90-9c21-64ed4b3feaaf-kube-api-access-df7b7\") pod \"perses-operator-5446b9c989-nqbpz\" (UID: \"d953db38-bd34-4d90-9c21-64ed4b3feaaf\") " pod="openshift-operators/perses-operator-5446b9c989-nqbpz" Dec 02 18:48:19 crc kubenswrapper[4792]: I1202 18:48:19.508401 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-nqbpz" Dec 02 18:48:19 crc kubenswrapper[4792]: E1202 18:48:19.532571 4792 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-nqbpz_openshift-operators_d953db38-bd34-4d90-9c21-64ed4b3feaaf_0(59bfd7dc0fff08050ed635877bdf9543c6ba564a232f1feb6793d433f3b7efff): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 02 18:48:19 crc kubenswrapper[4792]: E1202 18:48:19.532653 4792 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-nqbpz_openshift-operators_d953db38-bd34-4d90-9c21-64ed4b3feaaf_0(59bfd7dc0fff08050ed635877bdf9543c6ba564a232f1feb6793d433f3b7efff): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-5446b9c989-nqbpz" Dec 02 18:48:19 crc kubenswrapper[4792]: E1202 18:48:19.532675 4792 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-nqbpz_openshift-operators_d953db38-bd34-4d90-9c21-64ed4b3feaaf_0(59bfd7dc0fff08050ed635877bdf9543c6ba564a232f1feb6793d433f3b7efff): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/perses-operator-5446b9c989-nqbpz" Dec 02 18:48:19 crc kubenswrapper[4792]: E1202 18:48:19.532729 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"perses-operator-5446b9c989-nqbpz_openshift-operators(d953db38-bd34-4d90-9c21-64ed4b3feaaf)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"perses-operator-5446b9c989-nqbpz_openshift-operators(d953db38-bd34-4d90-9c21-64ed4b3feaaf)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-nqbpz_openshift-operators_d953db38-bd34-4d90-9c21-64ed4b3feaaf_0(59bfd7dc0fff08050ed635877bdf9543c6ba564a232f1feb6793d433f3b7efff): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/perses-operator-5446b9c989-nqbpz" podUID="d953db38-bd34-4d90-9c21-64ed4b3feaaf" Dec 02 18:48:19 crc kubenswrapper[4792]: I1202 18:48:19.800968 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" event={"ID":"2cfb6b15-6de9-4109-a346-f3140614b56f","Type":"ContainerStarted","Data":"6a3d56876d95fa99ea00e9623e9a30016f5abea21b8182fd3a5c7adbc2cbcf49"} Dec 02 18:48:19 crc kubenswrapper[4792]: I1202 18:48:19.801231 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:19 crc kubenswrapper[4792]: I1202 18:48:19.801320 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:19 crc kubenswrapper[4792]: I1202 18:48:19.801375 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:19 crc kubenswrapper[4792]: I1202 18:48:19.837467 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:19 crc kubenswrapper[4792]: I1202 18:48:19.853287 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" podStartSLOduration=7.853269292 podStartE2EDuration="7.853269292s" podCreationTimestamp="2025-12-02 18:48:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:48:19.848406677 +0000 UTC m=+730.621299025" watchObservedRunningTime="2025-12-02 18:48:19.853269292 +0000 UTC m=+730.626161620" Dec 02 18:48:19 crc kubenswrapper[4792]: I1202 18:48:19.871674 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:20 crc kubenswrapper[4792]: I1202 18:48:20.334867 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d"] Dec 02 18:48:20 crc kubenswrapper[4792]: I1202 18:48:20.335031 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d" Dec 02 18:48:20 crc kubenswrapper[4792]: I1202 18:48:20.335459 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d" Dec 02 18:48:20 crc kubenswrapper[4792]: I1202 18:48:20.338298 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-jg8gh"] Dec 02 18:48:20 crc kubenswrapper[4792]: I1202 18:48:20.338407 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-jg8gh" Dec 02 18:48:20 crc kubenswrapper[4792]: I1202 18:48:20.338825 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-jg8gh" Dec 02 18:48:20 crc kubenswrapper[4792]: I1202 18:48:20.351475 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9"] Dec 02 18:48:20 crc kubenswrapper[4792]: I1202 18:48:20.351718 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9" Dec 02 18:48:20 crc kubenswrapper[4792]: I1202 18:48:20.352149 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9" Dec 02 18:48:20 crc kubenswrapper[4792]: I1202 18:48:20.357787 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-nqbpz"] Dec 02 18:48:20 crc kubenswrapper[4792]: I1202 18:48:20.357897 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-nqbpz" Dec 02 18:48:20 crc kubenswrapper[4792]: I1202 18:48:20.358445 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-nqbpz" Dec 02 18:48:20 crc kubenswrapper[4792]: E1202 18:48:20.376789 4792 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-jg8gh_openshift-operators_eba5ee28-f55c-4e22-b4f7-22899eb4fdb7_0(52a23d7857bff77f4f09cc7ad6886c0a7f561b315178438fc7086c27a0113e2a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 02 18:48:20 crc kubenswrapper[4792]: E1202 18:48:20.376877 4792 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-jg8gh_openshift-operators_eba5ee28-f55c-4e22-b4f7-22899eb4fdb7_0(52a23d7857bff77f4f09cc7ad6886c0a7f561b315178438fc7086c27a0113e2a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-d8bb48f5d-jg8gh" Dec 02 18:48:20 crc kubenswrapper[4792]: E1202 18:48:20.376899 4792 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-jg8gh_openshift-operators_eba5ee28-f55c-4e22-b4f7-22899eb4fdb7_0(52a23d7857bff77f4f09cc7ad6886c0a7f561b315178438fc7086c27a0113e2a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/observability-operator-d8bb48f5d-jg8gh" Dec 02 18:48:20 crc kubenswrapper[4792]: E1202 18:48:20.376944 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"observability-operator-d8bb48f5d-jg8gh_openshift-operators(eba5ee28-f55c-4e22-b4f7-22899eb4fdb7)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"observability-operator-d8bb48f5d-jg8gh_openshift-operators(eba5ee28-f55c-4e22-b4f7-22899eb4fdb7)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-jg8gh_openshift-operators_eba5ee28-f55c-4e22-b4f7-22899eb4fdb7_0(52a23d7857bff77f4f09cc7ad6886c0a7f561b315178438fc7086c27a0113e2a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/observability-operator-d8bb48f5d-jg8gh" podUID="eba5ee28-f55c-4e22-b4f7-22899eb4fdb7" Dec 02 18:48:20 crc kubenswrapper[4792]: E1202 18:48:20.389062 4792 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d_openshift-operators_d4a8cfbd-83cc-47ee-abe2-f48802bb58e4_0(120b51a25e6d2e5ed2121d8b2abdcbded8a68532e36b53db0b6de122b84fb3f0): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 02 18:48:20 crc kubenswrapper[4792]: E1202 18:48:20.389140 4792 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d_openshift-operators_d4a8cfbd-83cc-47ee-abe2-f48802bb58e4_0(120b51a25e6d2e5ed2121d8b2abdcbded8a68532e36b53db0b6de122b84fb3f0): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d" Dec 02 18:48:20 crc kubenswrapper[4792]: E1202 18:48:20.389163 4792 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d_openshift-operators_d4a8cfbd-83cc-47ee-abe2-f48802bb58e4_0(120b51a25e6d2e5ed2121d8b2abdcbded8a68532e36b53db0b6de122b84fb3f0): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d" Dec 02 18:48:20 crc kubenswrapper[4792]: E1202 18:48:20.389223 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d_openshift-operators(d4a8cfbd-83cc-47ee-abe2-f48802bb58e4)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d_openshift-operators(d4a8cfbd-83cc-47ee-abe2-f48802bb58e4)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d_openshift-operators_d4a8cfbd-83cc-47ee-abe2-f48802bb58e4_0(120b51a25e6d2e5ed2121d8b2abdcbded8a68532e36b53db0b6de122b84fb3f0): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d" podUID="d4a8cfbd-83cc-47ee-abe2-f48802bb58e4" Dec 02 18:48:20 crc kubenswrapper[4792]: I1202 18:48:20.405794 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-5vh6b"] Dec 02 18:48:20 crc kubenswrapper[4792]: I1202 18:48:20.405937 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5vh6b" Dec 02 18:48:20 crc kubenswrapper[4792]: I1202 18:48:20.406338 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5vh6b" Dec 02 18:48:20 crc kubenswrapper[4792]: E1202 18:48:20.439919 4792 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9_openshift-operators_aeed3219-8084-40fd-888a-1e4bc4dd3179_0(07559c1dcbdbfc672812e5937a040e6c77024c282caaeef8567482936cf92cee): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 02 18:48:20 crc kubenswrapper[4792]: E1202 18:48:20.439998 4792 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9_openshift-operators_aeed3219-8084-40fd-888a-1e4bc4dd3179_0(07559c1dcbdbfc672812e5937a040e6c77024c282caaeef8567482936cf92cee): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9" Dec 02 18:48:20 crc kubenswrapper[4792]: E1202 18:48:20.440024 4792 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9_openshift-operators_aeed3219-8084-40fd-888a-1e4bc4dd3179_0(07559c1dcbdbfc672812e5937a040e6c77024c282caaeef8567482936cf92cee): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9" Dec 02 18:48:20 crc kubenswrapper[4792]: E1202 18:48:20.440075 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9_openshift-operators(aeed3219-8084-40fd-888a-1e4bc4dd3179)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9_openshift-operators(aeed3219-8084-40fd-888a-1e4bc4dd3179)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9_openshift-operators_aeed3219-8084-40fd-888a-1e4bc4dd3179_0(07559c1dcbdbfc672812e5937a040e6c77024c282caaeef8567482936cf92cee): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9" podUID="aeed3219-8084-40fd-888a-1e4bc4dd3179" Dec 02 18:48:20 crc kubenswrapper[4792]: E1202 18:48:20.454749 4792 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-nqbpz_openshift-operators_d953db38-bd34-4d90-9c21-64ed4b3feaaf_0(57842752b94c80be2619ad2775fce01a8e8540a062326fd8a6d8dff03ae216f2): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 02 18:48:20 crc kubenswrapper[4792]: E1202 18:48:20.454826 4792 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-nqbpz_openshift-operators_d953db38-bd34-4d90-9c21-64ed4b3feaaf_0(57842752b94c80be2619ad2775fce01a8e8540a062326fd8a6d8dff03ae216f2): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-5446b9c989-nqbpz" Dec 02 18:48:20 crc kubenswrapper[4792]: E1202 18:48:20.454850 4792 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-nqbpz_openshift-operators_d953db38-bd34-4d90-9c21-64ed4b3feaaf_0(57842752b94c80be2619ad2775fce01a8e8540a062326fd8a6d8dff03ae216f2): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-5446b9c989-nqbpz" Dec 02 18:48:20 crc kubenswrapper[4792]: E1202 18:48:20.454906 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"perses-operator-5446b9c989-nqbpz_openshift-operators(d953db38-bd34-4d90-9c21-64ed4b3feaaf)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"perses-operator-5446b9c989-nqbpz_openshift-operators(d953db38-bd34-4d90-9c21-64ed4b3feaaf)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-nqbpz_openshift-operators_d953db38-bd34-4d90-9c21-64ed4b3feaaf_0(57842752b94c80be2619ad2775fce01a8e8540a062326fd8a6d8dff03ae216f2): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/perses-operator-5446b9c989-nqbpz" podUID="d953db38-bd34-4d90-9c21-64ed4b3feaaf" Dec 02 18:48:20 crc kubenswrapper[4792]: E1202 18:48:20.459933 4792 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-5vh6b_openshift-operators_9b8eb6d9-8320-401e-8092-5333c1772c4e_0(e095ab0315b86d0e08f6469c08717c928f654d09be24a59ce75cdc133bba1844): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 02 18:48:20 crc kubenswrapper[4792]: E1202 18:48:20.460018 4792 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-5vh6b_openshift-operators_9b8eb6d9-8320-401e-8092-5333c1772c4e_0(e095ab0315b86d0e08f6469c08717c928f654d09be24a59ce75cdc133bba1844): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5vh6b" Dec 02 18:48:20 crc kubenswrapper[4792]: E1202 18:48:20.460043 4792 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-5vh6b_openshift-operators_9b8eb6d9-8320-401e-8092-5333c1772c4e_0(e095ab0315b86d0e08f6469c08717c928f654d09be24a59ce75cdc133bba1844): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5vh6b" Dec 02 18:48:20 crc kubenswrapper[4792]: E1202 18:48:20.460107 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-668cf9dfbb-5vh6b_openshift-operators(9b8eb6d9-8320-401e-8092-5333c1772c4e)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-668cf9dfbb-5vh6b_openshift-operators(9b8eb6d9-8320-401e-8092-5333c1772c4e)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-5vh6b_openshift-operators_9b8eb6d9-8320-401e-8092-5333c1772c4e_0(e095ab0315b86d0e08f6469c08717c928f654d09be24a59ce75cdc133bba1844): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5vh6b" podUID="9b8eb6d9-8320-401e-8092-5333c1772c4e" Dec 02 18:48:25 crc kubenswrapper[4792]: I1202 18:48:25.539220 4792 scope.go:117] "RemoveContainer" containerID="585fe554fcc83a4fa1c4bb6351183665390e1742e0bd51b90f2345444377b8c3" Dec 02 18:48:26 crc kubenswrapper[4792]: I1202 18:48:26.851721 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-dw25w_6925e194-2dc8-4a3a-aa76-8db41ff27997/kube-multus/2.log" Dec 02 18:48:26 crc kubenswrapper[4792]: I1202 18:48:26.854052 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-dw25w_6925e194-2dc8-4a3a-aa76-8db41ff27997/kube-multus/1.log" Dec 02 18:48:26 crc kubenswrapper[4792]: I1202 18:48:26.854135 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-dw25w" event={"ID":"6925e194-2dc8-4a3a-aa76-8db41ff27997","Type":"ContainerStarted","Data":"237176d4c4b78eeec87d8b373f557a4bd9e9137e3e83c170fe22572c38552f14"} Dec 02 18:48:32 crc kubenswrapper[4792]: I1202 18:48:32.539030 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-jg8gh" Dec 02 18:48:32 crc kubenswrapper[4792]: I1202 18:48:32.540209 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-jg8gh" Dec 02 18:48:32 crc kubenswrapper[4792]: I1202 18:48:32.904352 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-jg8gh"] Dec 02 18:48:32 crc kubenswrapper[4792]: W1202 18:48:32.917793 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeba5ee28_f55c_4e22_b4f7_22899eb4fdb7.slice/crio-ffdb0eb31ab1ddae38b48521b349457c585e17e49a2d55deb9c130235b5dfab3 WatchSource:0}: Error finding container ffdb0eb31ab1ddae38b48521b349457c585e17e49a2d55deb9c130235b5dfab3: Status 404 returned error can't find the container with id ffdb0eb31ab1ddae38b48521b349457c585e17e49a2d55deb9c130235b5dfab3 Dec 02 18:48:33 crc kubenswrapper[4792]: I1202 18:48:33.539952 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5vh6b" Dec 02 18:48:33 crc kubenswrapper[4792]: I1202 18:48:33.540634 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5vh6b" Dec 02 18:48:33 crc kubenswrapper[4792]: I1202 18:48:33.751846 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-5vh6b"] Dec 02 18:48:33 crc kubenswrapper[4792]: I1202 18:48:33.893403 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-jg8gh" event={"ID":"eba5ee28-f55c-4e22-b4f7-22899eb4fdb7","Type":"ContainerStarted","Data":"ffdb0eb31ab1ddae38b48521b349457c585e17e49a2d55deb9c130235b5dfab3"} Dec 02 18:48:33 crc kubenswrapper[4792]: I1202 18:48:33.894724 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5vh6b" event={"ID":"9b8eb6d9-8320-401e-8092-5333c1772c4e","Type":"ContainerStarted","Data":"94ec6fd5c3a889c339c296e5efed0408c54817a048da860f9946b797015ea2dd"} Dec 02 18:48:34 crc kubenswrapper[4792]: I1202 18:48:34.539343 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d" Dec 02 18:48:34 crc kubenswrapper[4792]: I1202 18:48:34.540078 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d" Dec 02 18:48:34 crc kubenswrapper[4792]: I1202 18:48:34.796907 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d"] Dec 02 18:48:34 crc kubenswrapper[4792]: W1202 18:48:34.805646 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd4a8cfbd_83cc_47ee_abe2_f48802bb58e4.slice/crio-b12ce245900f54e6b3b05cf8463bec18400e5c3ae98c1561eb32e3681c34f3e0 WatchSource:0}: Error finding container b12ce245900f54e6b3b05cf8463bec18400e5c3ae98c1561eb32e3681c34f3e0: Status 404 returned error can't find the container with id b12ce245900f54e6b3b05cf8463bec18400e5c3ae98c1561eb32e3681c34f3e0 Dec 02 18:48:34 crc kubenswrapper[4792]: I1202 18:48:34.901008 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d" event={"ID":"d4a8cfbd-83cc-47ee-abe2-f48802bb58e4","Type":"ContainerStarted","Data":"b12ce245900f54e6b3b05cf8463bec18400e5c3ae98c1561eb32e3681c34f3e0"} Dec 02 18:48:35 crc kubenswrapper[4792]: I1202 18:48:35.538953 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9" Dec 02 18:48:35 crc kubenswrapper[4792]: I1202 18:48:35.539029 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-nqbpz" Dec 02 18:48:35 crc kubenswrapper[4792]: I1202 18:48:35.539592 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9" Dec 02 18:48:35 crc kubenswrapper[4792]: I1202 18:48:35.539965 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-nqbpz" Dec 02 18:48:35 crc kubenswrapper[4792]: W1202 18:48:35.885256 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaeed3219_8084_40fd_888a_1e4bc4dd3179.slice/crio-1f0dfc5294e731abe071daf651b39793fe59065ed634b79ff1f9a3cba58d0d2d WatchSource:0}: Error finding container 1f0dfc5294e731abe071daf651b39793fe59065ed634b79ff1f9a3cba58d0d2d: Status 404 returned error can't find the container with id 1f0dfc5294e731abe071daf651b39793fe59065ed634b79ff1f9a3cba58d0d2d Dec 02 18:48:35 crc kubenswrapper[4792]: I1202 18:48:35.899433 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9"] Dec 02 18:48:35 crc kubenswrapper[4792]: I1202 18:48:35.910503 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9" event={"ID":"aeed3219-8084-40fd-888a-1e4bc4dd3179","Type":"ContainerStarted","Data":"1f0dfc5294e731abe071daf651b39793fe59065ed634b79ff1f9a3cba58d0d2d"} Dec 02 18:48:36 crc kubenswrapper[4792]: I1202 18:48:36.041248 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-nqbpz"] Dec 02 18:48:36 crc kubenswrapper[4792]: W1202 18:48:36.046229 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd953db38_bd34_4d90_9c21_64ed4b3feaaf.slice/crio-66a8bfb5e0478ad5b9f8c3c9010ac2eb45fde19a7e4f6df7f2e0ad8e62d30293 WatchSource:0}: Error finding container 66a8bfb5e0478ad5b9f8c3c9010ac2eb45fde19a7e4f6df7f2e0ad8e62d30293: Status 404 returned error can't find the container with id 66a8bfb5e0478ad5b9f8c3c9010ac2eb45fde19a7e4f6df7f2e0ad8e62d30293 Dec 02 18:48:36 crc kubenswrapper[4792]: I1202 18:48:36.917933 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-nqbpz" event={"ID":"d953db38-bd34-4d90-9c21-64ed4b3feaaf","Type":"ContainerStarted","Data":"66a8bfb5e0478ad5b9f8c3c9010ac2eb45fde19a7e4f6df7f2e0ad8e62d30293"} Dec 02 18:48:38 crc kubenswrapper[4792]: I1202 18:48:38.081823 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 18:48:38 crc kubenswrapper[4792]: I1202 18:48:38.081888 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 18:48:40 crc kubenswrapper[4792]: I1202 18:48:40.348914 4792 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 02 18:48:42 crc kubenswrapper[4792]: I1202 18:48:42.844597 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-7rjvb" Dec 02 18:48:55 crc kubenswrapper[4792]: E1202 18:48:55.675881 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: 
copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:ce7d2904f7b238aa37dfe74a0b76bf73629e7a14fa52bf54b0ecf030ca36f1bb" Dec 02 18:48:55 crc kubenswrapper[4792]: E1202 18:48:55.676771 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:ce7d2904f7b238aa37dfe74a0b76bf73629e7a14fa52bf54b0ecf030ca36f1bb,Command:[],Args:[--namespace=$(NAMESPACE) --images=perses=$(RELATED_IMAGE_PERSES) --images=alertmanager=$(RELATED_IMAGE_ALERTMANAGER) --images=prometheus=$(RELATED_IMAGE_PROMETHEUS) --images=thanos=$(RELATED_IMAGE_THANOS) --images=ui-dashboards=$(RELATED_IMAGE_CONSOLE_DASHBOARDS_PLUGIN) --images=ui-distributed-tracing=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN) --images=ui-distributed-tracing-pf5=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF5) --images=ui-distributed-tracing-pf4=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF4) --images=ui-logging=$(RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN) --images=ui-logging-pf4=$(RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN_PF4) --images=ui-troubleshooting-panel=$(RELATED_IMAGE_CONSOLE_TROUBLESHOOTING_PANEL_PLUGIN) --images=ui-monitoring=$(RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN) --images=ui-monitoring-pf5=$(RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN_PF5) --images=korrel8r=$(RELATED_IMAGE_KORREL8R) --images=health-analyzer=$(RELATED_IMAGE_CLUSTER_HEALTH_ANALYZER) --openshift.enabled=true],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:RELATED_IMAGE_ALERTMANAGER,Value:registry.redhat.io/cluster-observability-operator/alertmanager-rhel9@sha256:e718854a7d6ca8accf0fa72db0eb902e46c44d747ad51dc3f06bba0cefaa3c01,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PROMETHEUS,Value:registry.redhat.io/cluster-observability-operator/prometheus-rhel9@sha256:17ea20be390a94ab39f5cdd7f0cbc2498046eebcf77fe3dec9aa288d5c2cf46b,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_THANOS,Value:registry.redhat.io/cluster-observability-operator/thanos-rhel9@sha256:d972f4faa5e9c121402d23ed85002f26af48ec36b1b71a7489d677b3913d08b4,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PERSES,Value:registry.redhat.io/cluster-observability-operator/perses-rhel9@sha256:91531137fc1dcd740e277e0f65e120a0176a16f788c14c27925b61aa0b792ade,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DASHBOARDS_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/dashboards-console-plugin-rhel9@sha256:a69da8bbca8a28dd2925f864d51cc31cf761b10532c553095ba40b242ef701cb,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-rhel9@sha256:897e1bfad1187062725b54d87107bd0155972257a50d8335dd29e1999b828a4f,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF5,Value:registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-pf5-rhel9@sha256:95fe5b5746ca8c07ac9217ce2d8ac8e6afad17af210f9d8e0074df1310b209a8,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF4,Value:registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-pf4-rhel9@sha256:e9d9a89e4d8126a62b1852055482258ee528cac6398dd5d43ebad75ace0f33c9,V
alueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/logging-console-plugin-rhel9@sha256:ec684a0645ceb917b019af7ddba68c3533416e356ab0d0320a30e75ca7ebb31b,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN_PF4,Value:registry.redhat.io/cluster-observability-operator/logging-console-plugin-pf4-rhel9@sha256:3b9693fcde9b3a9494fb04735b1f7cfd0426f10be820fdc3f024175c0d3df1c9,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_TROUBLESHOOTING_PANEL_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/troubleshooting-panel-console-plugin-rhel9@sha256:580606f194180accc8abba099e17a26dca7522ec6d233fa2fdd40312771703e3,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/monitoring-console-plugin-rhel9@sha256:e03777be39e71701935059cd877603874a13ac94daa73219d4e5e545599d78a9,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN_PF5,Value:registry.redhat.io/cluster-observability-operator/monitoring-console-plugin-pf5-rhel9@sha256:aa47256193cfd2877853878e1ae97d2ab8b8e5deae62b387cbfad02b284d379c,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KORREL8R,Value:registry.redhat.io/cluster-observability-operator/korrel8r-rhel9@sha256:c595ff56b2cb85514bf4784db6ddb82e4e657e3e708a7fb695fc4997379a94d4,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CLUSTER_HEALTH_ANALYZER,Value:registry.redhat.io/cluster-observability-operator/cluster-health-analyzer-rhel9@sha256:45a4ec2a519bcec99e886aa91596d5356a2414a2bd103baaef9fa7838c672eb2,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{400 -3} {} 400m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:observability-operator-tls,ReadOnly:true,MountPath:/etc/tls/private,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-v2zw6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000350000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
observability-operator-d8bb48f5d-jg8gh_openshift-operators(eba5ee28-f55c-4e22-b4f7-22899eb4fdb7): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 18:48:55 crc kubenswrapper[4792]: E1202 18:48:55.678108 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/observability-operator-d8bb48f5d-jg8gh" podUID="eba5ee28-f55c-4e22-b4f7-22899eb4fdb7" Dec 02 18:48:56 crc kubenswrapper[4792]: E1202 18:48:56.046458 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:ce7d2904f7b238aa37dfe74a0b76bf73629e7a14fa52bf54b0ecf030ca36f1bb\\\"\"" pod="openshift-operators/observability-operator-d8bb48f5d-jg8gh" podUID="eba5ee28-f55c-4e22-b4f7-22899eb4fdb7" Dec 02 18:48:56 crc kubenswrapper[4792]: E1202 18:48:56.241998 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec" Dec 02 18:48:56 crc kubenswrapper[4792]: E1202 18:48:56.242302 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:prometheus-operator-admission-webhook,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec,Command:[],Args:[--web.enable-tls=true --web.cert-file=/tmp/k8s-webhook-server/serving-certs/tls.crt --web.key-file=/tmp/k8s-webhook-server/serving-certs/tls.key],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{209715200 0} {} BinarySI},},Requests:ResourceList{cpu: {{50 -3} {} 50m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:apiservice-cert,ReadOnly:false,MountPath:/apiserver.local.config/certificates,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:webhook-cert,ReadOnly:false,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start 
failed in pod obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9_openshift-operators(aeed3219-8084-40fd-888a-1e4bc4dd3179): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 18:48:56 crc kubenswrapper[4792]: E1202 18:48:56.243707 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9" podUID="aeed3219-8084-40fd-888a-1e4bc4dd3179" Dec 02 18:48:56 crc kubenswrapper[4792]: E1202 18:48:56.766434 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec" Dec 02 18:48:56 crc kubenswrapper[4792]: E1202 18:48:56.766654 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:prometheus-operator-admission-webhook,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec,Command:[],Args:[--web.enable-tls=true --web.cert-file=/tmp/k8s-webhook-server/serving-certs/tls.crt --web.key-file=/tmp/k8s-webhook-server/serving-certs/tls.key],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{209715200 0} {} BinarySI},},Requests:ResourceList{cpu: {{50 -3} {} 50m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:apiservice-cert,ReadOnly:false,MountPath:/apiserver.local.config/certificates,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:webhook-cert,ReadOnly:false,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d_openshift-operators(d4a8cfbd-83cc-47ee-abe2-f48802bb58e4): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 18:48:56 crc kubenswrapper[4792]: E1202 18:48:56.768492 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"prometheus-operator-admission-webhook\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d" podUID="d4a8cfbd-83cc-47ee-abe2-f48802bb58e4" Dec 02 18:48:56 crc kubenswrapper[4792]: E1202 18:48:56.853836 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/perses-rhel9-operator@sha256:9aec4c328ec43e40481e06ca5808deead74b75c0aacb90e9e72966c3fa14f385" Dec 02 18:48:56 crc kubenswrapper[4792]: E1202 18:48:56.854097 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:perses-operator,Image:registry.redhat.io/cluster-observability-operator/perses-rhel9-operator@sha256:9aec4c328ec43e40481e06ca5808deead74b75c0aacb90e9e72966c3fa14f385,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{134217728 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:openshift-service-ca,ReadOnly:true,MountPath:/ca,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-df7b7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000350000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod perses-operator-5446b9c989-nqbpz_openshift-operators(d953db38-bd34-4d90-9c21-64ed4b3feaaf): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 18:48:56 crc kubenswrapper[4792]: E1202 18:48:56.855319 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"perses-operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" 
pod="openshift-operators/perses-operator-5446b9c989-nqbpz" podUID="d953db38-bd34-4d90-9c21-64ed4b3feaaf" Dec 02 18:48:57 crc kubenswrapper[4792]: E1202 18:48:57.052664 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec\\\"\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9" podUID="aeed3219-8084-40fd-888a-1e4bc4dd3179" Dec 02 18:48:57 crc kubenswrapper[4792]: E1202 18:48:57.052954 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec\\\"\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d" podUID="d4a8cfbd-83cc-47ee-abe2-f48802bb58e4" Dec 02 18:48:57 crc kubenswrapper[4792]: E1202 18:48:57.053221 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"perses-operator\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/perses-rhel9-operator@sha256:9aec4c328ec43e40481e06ca5808deead74b75c0aacb90e9e72966c3fa14f385\\\"\"" pod="openshift-operators/perses-operator-5446b9c989-nqbpz" podUID="d953db38-bd34-4d90-9c21-64ed4b3feaaf" Dec 02 18:48:57 crc kubenswrapper[4792]: E1202 18:48:57.649321 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:203cf5b9dc1460f09e75f58d8b5cf7df5e57c18c8c6a41c14b5e8977d83263f3" Dec 02 18:48:57 crc kubenswrapper[4792]: E1202 18:48:57.649622 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:prometheus-operator,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:203cf5b9dc1460f09e75f58d8b5cf7df5e57c18c8c6a41c14b5e8977d83263f3,Command:[],Args:[--prometheus-config-reloader=$(RELATED_IMAGE_PROMETHEUS_CONFIG_RELOADER) --prometheus-instance-selector=app.kubernetes.io/managed-by=observability-operator --alertmanager-instance-selector=app.kubernetes.io/managed-by=observability-operator --thanos-ruler-instance-selector=app.kubernetes.io/managed-by=observability-operator],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:http,HostPort:0,ContainerPort:8080,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:GOGC,Value:30,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PROMETHEUS_CONFIG_RELOADER,Value:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-prometheus-config-reloader-rhel9@sha256:1133c973c7472c665f910a722e19c8e2e27accb34b90fab67f14548627ce9c62,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{157286400 0} {} 150Mi 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-pvf98,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod obo-prometheus-operator-668cf9dfbb-5vh6b_openshift-operators(9b8eb6d9-8320-401e-8092-5333c1772c4e): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 18:48:57 crc kubenswrapper[4792]: E1202 18:48:57.651574 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5vh6b" podUID="9b8eb6d9-8320-401e-8092-5333c1772c4e" Dec 02 18:48:58 crc kubenswrapper[4792]: E1202 18:48:58.060292 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:203cf5b9dc1460f09e75f58d8b5cf7df5e57c18c8c6a41c14b5e8977d83263f3\\\"\"" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5vh6b" podUID="9b8eb6d9-8320-401e-8092-5333c1772c4e" Dec 02 18:49:08 crc kubenswrapper[4792]: I1202 18:49:08.082708 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 18:49:08 crc kubenswrapper[4792]: I1202 18:49:08.083519 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 18:49:09 crc kubenswrapper[4792]: I1202 18:49:09.976385 4792 scope.go:117] "RemoveContainer" containerID="58258d39cb6d0616429a94e2f3542d2250c8e8a89d393cf5667aa0b877d03797" Dec 02 18:49:10 crc kubenswrapper[4792]: I1202 18:49:10.151501 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d" event={"ID":"d4a8cfbd-83cc-47ee-abe2-f48802bb58e4","Type":"ContainerStarted","Data":"648f984d159d12f59b1a9b3357c0ee114269a121f558fea3042c0ac9202618a2"} Dec 02 18:49:10 crc kubenswrapper[4792]: I1202 18:49:10.154754 4792 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-multus_multus-dw25w_6925e194-2dc8-4a3a-aa76-8db41ff27997/kube-multus/2.log" Dec 02 18:49:10 crc kubenswrapper[4792]: I1202 18:49:10.174593 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d" podStartSLOduration=17.647416593 podStartE2EDuration="52.174571897s" podCreationTimestamp="2025-12-02 18:48:18 +0000 UTC" firstStartedPulling="2025-12-02 18:48:34.809344306 +0000 UTC m=+745.582236634" lastFinishedPulling="2025-12-02 18:49:09.33649958 +0000 UTC m=+780.109391938" observedRunningTime="2025-12-02 18:49:10.172565356 +0000 UTC m=+780.945457744" watchObservedRunningTime="2025-12-02 18:49:10.174571897 +0000 UTC m=+780.947464265" Dec 02 18:49:13 crc kubenswrapper[4792]: I1202 18:49:13.177642 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5vh6b" event={"ID":"9b8eb6d9-8320-401e-8092-5333c1772c4e","Type":"ContainerStarted","Data":"6008535079d2bf76b1a9641721461da10479d828ad481aa656dc99cf00d27b22"} Dec 02 18:49:13 crc kubenswrapper[4792]: I1202 18:49:13.180163 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-nqbpz" event={"ID":"d953db38-bd34-4d90-9c21-64ed4b3feaaf","Type":"ContainerStarted","Data":"4f73cf02efd65972d93268396da8a630b2c35876813831bef00aa9dd48134f8c"} Dec 02 18:49:13 crc kubenswrapper[4792]: I1202 18:49:13.181200 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-5446b9c989-nqbpz" Dec 02 18:49:13 crc kubenswrapper[4792]: I1202 18:49:13.209795 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-5446b9c989-nqbpz" podStartSLOduration=17.907052483 podStartE2EDuration="54.209769244s" podCreationTimestamp="2025-12-02 18:48:19 +0000 UTC" firstStartedPulling="2025-12-02 18:48:36.049417924 +0000 UTC m=+746.822310262" lastFinishedPulling="2025-12-02 18:49:12.352134655 +0000 UTC m=+783.125027023" observedRunningTime="2025-12-02 18:49:13.207747282 +0000 UTC m=+783.980639660" watchObservedRunningTime="2025-12-02 18:49:13.209769244 +0000 UTC m=+783.982661612" Dec 02 18:49:14 crc kubenswrapper[4792]: I1202 18:49:14.189840 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-jg8gh" event={"ID":"eba5ee28-f55c-4e22-b4f7-22899eb4fdb7","Type":"ContainerStarted","Data":"eaef3a0627a5928493a8b44052bc288f61cfc9839f38920889d5e3f3d38ca296"} Dec 02 18:49:14 crc kubenswrapper[4792]: I1202 18:49:14.190996 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-d8bb48f5d-jg8gh" Dec 02 18:49:14 crc kubenswrapper[4792]: I1202 18:49:14.192584 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9" event={"ID":"aeed3219-8084-40fd-888a-1e4bc4dd3179","Type":"ContainerStarted","Data":"c658708ff2184678081ece60bec1a72cf9b7b335d310da0bc530c33250939c59"} Dec 02 18:49:14 crc kubenswrapper[4792]: I1202 18:49:14.227518 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-d8bb48f5d-jg8gh" podStartSLOduration=15.685316887 podStartE2EDuration="56.227486653s" podCreationTimestamp="2025-12-02 18:48:18 +0000 UTC" firstStartedPulling="2025-12-02 18:48:32.920089201 +0000 UTC m=+743.692981529" 
lastFinishedPulling="2025-12-02 18:49:13.462258967 +0000 UTC m=+784.235151295" observedRunningTime="2025-12-02 18:49:14.221287094 +0000 UTC m=+784.994179472" watchObservedRunningTime="2025-12-02 18:49:14.227486653 +0000 UTC m=+785.000379021" Dec 02 18:49:14 crc kubenswrapper[4792]: I1202 18:49:14.250607 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-d8bb48f5d-jg8gh" Dec 02 18:49:14 crc kubenswrapper[4792]: I1202 18:49:14.256081 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5vh6b" podStartSLOduration=17.567307397 podStartE2EDuration="56.256047137s" podCreationTimestamp="2025-12-02 18:48:18 +0000 UTC" firstStartedPulling="2025-12-02 18:48:33.758492806 +0000 UTC m=+744.531385124" lastFinishedPulling="2025-12-02 18:49:12.447232536 +0000 UTC m=+783.220124864" observedRunningTime="2025-12-02 18:49:14.248848952 +0000 UTC m=+785.021741330" watchObservedRunningTime="2025-12-02 18:49:14.256047137 +0000 UTC m=+785.028939505" Dec 02 18:49:14 crc kubenswrapper[4792]: I1202 18:49:14.273900 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9" podStartSLOduration=-9223371980.5809 podStartE2EDuration="56.273874674s" podCreationTimestamp="2025-12-02 18:48:18 +0000 UTC" firstStartedPulling="2025-12-02 18:48:35.887945229 +0000 UTC m=+746.660837557" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:49:14.26904879 +0000 UTC m=+785.041941168" watchObservedRunningTime="2025-12-02 18:49:14.273874674 +0000 UTC m=+785.046767042" Dec 02 18:49:19 crc kubenswrapper[4792]: I1202 18:49:19.511866 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-5446b9c989-nqbpz" Dec 02 18:49:23 crc kubenswrapper[4792]: I1202 18:49:23.512493 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-qxjx2"] Dec 02 18:49:23 crc kubenswrapper[4792]: I1202 18:49:23.513654 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-qxjx2" Dec 02 18:49:23 crc kubenswrapper[4792]: I1202 18:49:23.515594 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Dec 02 18:49:23 crc kubenswrapper[4792]: I1202 18:49:23.515608 4792 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-h624g" Dec 02 18:49:23 crc kubenswrapper[4792]: I1202 18:49:23.516987 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Dec 02 18:49:23 crc kubenswrapper[4792]: I1202 18:49:23.517666 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-j9t7f"] Dec 02 18:49:23 crc kubenswrapper[4792]: I1202 18:49:23.518646 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-j9t7f" Dec 02 18:49:23 crc kubenswrapper[4792]: I1202 18:49:23.526455 4792 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-dr4mv" Dec 02 18:49:23 crc kubenswrapper[4792]: I1202 18:49:23.533183 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-j9t7f"] Dec 02 18:49:23 crc kubenswrapper[4792]: I1202 18:49:23.536308 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-2lqc8"] Dec 02 18:49:23 crc kubenswrapper[4792]: I1202 18:49:23.537006 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-2lqc8" Dec 02 18:49:23 crc kubenswrapper[4792]: I1202 18:49:23.584920 4792 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-dbkmp" Dec 02 18:49:23 crc kubenswrapper[4792]: I1202 18:49:23.598567 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-qxjx2"] Dec 02 18:49:23 crc kubenswrapper[4792]: I1202 18:49:23.604906 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-2lqc8"] Dec 02 18:49:23 crc kubenswrapper[4792]: I1202 18:49:23.646776 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2vdlm\" (UniqueName: \"kubernetes.io/projected/a125909d-e0cc-4e4d-ad34-361379b74bf4-kube-api-access-2vdlm\") pod \"cert-manager-webhook-5655c58dd6-2lqc8\" (UID: \"a125909d-e0cc-4e4d-ad34-361379b74bf4\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-2lqc8" Dec 02 18:49:23 crc kubenswrapper[4792]: I1202 18:49:23.646835 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9gqnv\" (UniqueName: \"kubernetes.io/projected/99286d04-ee0d-49ca-84f4-4e7dd9fd9e76-kube-api-access-9gqnv\") pod \"cert-manager-cainjector-7f985d654d-qxjx2\" (UID: \"99286d04-ee0d-49ca-84f4-4e7dd9fd9e76\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-qxjx2" Dec 02 18:49:23 crc kubenswrapper[4792]: I1202 18:49:23.646858 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x54dr\" (UniqueName: \"kubernetes.io/projected/55936820-08a3-4569-a1c3-a2c8ff5ce620-kube-api-access-x54dr\") pod \"cert-manager-5b446d88c5-j9t7f\" (UID: \"55936820-08a3-4569-a1c3-a2c8ff5ce620\") " pod="cert-manager/cert-manager-5b446d88c5-j9t7f" Dec 02 18:49:23 crc kubenswrapper[4792]: I1202 18:49:23.747792 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2vdlm\" (UniqueName: \"kubernetes.io/projected/a125909d-e0cc-4e4d-ad34-361379b74bf4-kube-api-access-2vdlm\") pod \"cert-manager-webhook-5655c58dd6-2lqc8\" (UID: \"a125909d-e0cc-4e4d-ad34-361379b74bf4\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-2lqc8" Dec 02 18:49:23 crc kubenswrapper[4792]: I1202 18:49:23.747972 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9gqnv\" (UniqueName: \"kubernetes.io/projected/99286d04-ee0d-49ca-84f4-4e7dd9fd9e76-kube-api-access-9gqnv\") pod \"cert-manager-cainjector-7f985d654d-qxjx2\" (UID: \"99286d04-ee0d-49ca-84f4-4e7dd9fd9e76\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-qxjx2" Dec 02 18:49:23 crc kubenswrapper[4792]: I1202 
18:49:23.748240 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x54dr\" (UniqueName: \"kubernetes.io/projected/55936820-08a3-4569-a1c3-a2c8ff5ce620-kube-api-access-x54dr\") pod \"cert-manager-5b446d88c5-j9t7f\" (UID: \"55936820-08a3-4569-a1c3-a2c8ff5ce620\") " pod="cert-manager/cert-manager-5b446d88c5-j9t7f" Dec 02 18:49:23 crc kubenswrapper[4792]: I1202 18:49:23.767143 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2vdlm\" (UniqueName: \"kubernetes.io/projected/a125909d-e0cc-4e4d-ad34-361379b74bf4-kube-api-access-2vdlm\") pod \"cert-manager-webhook-5655c58dd6-2lqc8\" (UID: \"a125909d-e0cc-4e4d-ad34-361379b74bf4\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-2lqc8" Dec 02 18:49:23 crc kubenswrapper[4792]: I1202 18:49:23.768239 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x54dr\" (UniqueName: \"kubernetes.io/projected/55936820-08a3-4569-a1c3-a2c8ff5ce620-kube-api-access-x54dr\") pod \"cert-manager-5b446d88c5-j9t7f\" (UID: \"55936820-08a3-4569-a1c3-a2c8ff5ce620\") " pod="cert-manager/cert-manager-5b446d88c5-j9t7f" Dec 02 18:49:23 crc kubenswrapper[4792]: I1202 18:49:23.773114 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9gqnv\" (UniqueName: \"kubernetes.io/projected/99286d04-ee0d-49ca-84f4-4e7dd9fd9e76-kube-api-access-9gqnv\") pod \"cert-manager-cainjector-7f985d654d-qxjx2\" (UID: \"99286d04-ee0d-49ca-84f4-4e7dd9fd9e76\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-qxjx2" Dec 02 18:49:23 crc kubenswrapper[4792]: I1202 18:49:23.829376 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-qxjx2" Dec 02 18:49:23 crc kubenswrapper[4792]: I1202 18:49:23.836246 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-j9t7f" Dec 02 18:49:23 crc kubenswrapper[4792]: I1202 18:49:23.899180 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-2lqc8" Dec 02 18:49:24 crc kubenswrapper[4792]: I1202 18:49:24.137312 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-j9t7f"] Dec 02 18:49:24 crc kubenswrapper[4792]: I1202 18:49:24.205906 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-qxjx2"] Dec 02 18:49:24 crc kubenswrapper[4792]: I1202 18:49:24.302491 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-j9t7f" event={"ID":"55936820-08a3-4569-a1c3-a2c8ff5ce620","Type":"ContainerStarted","Data":"9477d444cd451b8e0a49bcbcc4409714421394ce0f158427643c7873a89b14b5"} Dec 02 18:49:24 crc kubenswrapper[4792]: I1202 18:49:24.303551 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-qxjx2" event={"ID":"99286d04-ee0d-49ca-84f4-4e7dd9fd9e76","Type":"ContainerStarted","Data":"e2371caf3780463356b81b801cea5d8cc89536bf795e7dd77b3fcb67094553ab"} Dec 02 18:49:24 crc kubenswrapper[4792]: I1202 18:49:24.372760 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-2lqc8"] Dec 02 18:49:24 crc kubenswrapper[4792]: W1202 18:49:24.374784 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda125909d_e0cc_4e4d_ad34_361379b74bf4.slice/crio-519bba5f463948d2ebd597c4d6cd91bf8dddcc3ad8b6bf8fc07722ee9f610a76 WatchSource:0}: Error finding container 519bba5f463948d2ebd597c4d6cd91bf8dddcc3ad8b6bf8fc07722ee9f610a76: Status 404 returned error can't find the container with id 519bba5f463948d2ebd597c4d6cd91bf8dddcc3ad8b6bf8fc07722ee9f610a76 Dec 02 18:49:25 crc kubenswrapper[4792]: I1202 18:49:25.313074 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-2lqc8" event={"ID":"a125909d-e0cc-4e4d-ad34-361379b74bf4","Type":"ContainerStarted","Data":"519bba5f463948d2ebd597c4d6cd91bf8dddcc3ad8b6bf8fc07722ee9f610a76"} Dec 02 18:49:38 crc kubenswrapper[4792]: I1202 18:49:38.081412 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 18:49:38 crc kubenswrapper[4792]: I1202 18:49:38.082264 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 18:49:38 crc kubenswrapper[4792]: I1202 18:49:38.082347 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" Dec 02 18:49:38 crc kubenswrapper[4792]: I1202 18:49:38.083214 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2d504f78e5a425b84ad94074fbe4a535e7551511fbcfda776b0f6a3eefb2619a"} pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 18:49:38 crc kubenswrapper[4792]: 
I1202 18:49:38.083304 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" containerID="cri-o://2d504f78e5a425b84ad94074fbe4a535e7551511fbcfda776b0f6a3eefb2619a" gracePeriod=600 Dec 02 18:49:39 crc kubenswrapper[4792]: I1202 18:49:39.416493 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-j9t7f" event={"ID":"55936820-08a3-4569-a1c3-a2c8ff5ce620","Type":"ContainerStarted","Data":"bbf7b7ad1a8c8b19757310ad27a8522b0db419f225e3352522c26abd29d09698"} Dec 02 18:49:39 crc kubenswrapper[4792]: I1202 18:49:39.420631 4792 generic.go:334] "Generic (PLEG): container finished" podID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerID="2d504f78e5a425b84ad94074fbe4a535e7551511fbcfda776b0f6a3eefb2619a" exitCode=0 Dec 02 18:49:39 crc kubenswrapper[4792]: I1202 18:49:39.420703 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" event={"ID":"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f","Type":"ContainerDied","Data":"2d504f78e5a425b84ad94074fbe4a535e7551511fbcfda776b0f6a3eefb2619a"} Dec 02 18:49:39 crc kubenswrapper[4792]: I1202 18:49:39.420782 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" event={"ID":"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f","Type":"ContainerStarted","Data":"51777911bdc2fd4cba93567e9367b9a4b299d12dfa4458a4ce8aa3d35773b2ea"} Dec 02 18:49:39 crc kubenswrapper[4792]: I1202 18:49:39.420813 4792 scope.go:117] "RemoveContainer" containerID="b375625f1c2078e924950cfc9619c063ebe0f56fdae68ff761f6603f5e189ffa" Dec 02 18:49:39 crc kubenswrapper[4792]: I1202 18:49:39.422553 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-qxjx2" event={"ID":"99286d04-ee0d-49ca-84f4-4e7dd9fd9e76","Type":"ContainerStarted","Data":"1dae036aa12b441cd3f3caa649450b2d3555d96c6a9dd196fc0194ee82fc9a96"} Dec 02 18:49:39 crc kubenswrapper[4792]: I1202 18:49:39.428081 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-2lqc8" event={"ID":"a125909d-e0cc-4e4d-ad34-361379b74bf4","Type":"ContainerStarted","Data":"e9d92266658a88483f7bfb9d990c7ed9e3ac6888518369cb7f4a0e8384e549b0"} Dec 02 18:49:39 crc kubenswrapper[4792]: I1202 18:49:39.428267 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-2lqc8" Dec 02 18:49:39 crc kubenswrapper[4792]: I1202 18:49:39.459656 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-j9t7f" podStartSLOduration=3.177799057 podStartE2EDuration="16.459627616s" podCreationTimestamp="2025-12-02 18:49:23 +0000 UTC" firstStartedPulling="2025-12-02 18:49:24.193130932 +0000 UTC m=+794.966023260" lastFinishedPulling="2025-12-02 18:49:37.474959501 +0000 UTC m=+808.247851819" observedRunningTime="2025-12-02 18:49:39.446452988 +0000 UTC m=+810.219345356" watchObservedRunningTime="2025-12-02 18:49:39.459627616 +0000 UTC m=+810.232519944" Dec 02 18:49:39 crc kubenswrapper[4792]: I1202 18:49:39.522312 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-qxjx2" podStartSLOduration=3.343790759 podStartE2EDuration="16.522278725s" podCreationTimestamp="2025-12-02 
18:49:23 +0000 UTC" firstStartedPulling="2025-12-02 18:49:24.232434321 +0000 UTC m=+795.005326649" lastFinishedPulling="2025-12-02 18:49:37.410922287 +0000 UTC m=+808.183814615" observedRunningTime="2025-12-02 18:49:39.514138806 +0000 UTC m=+810.287031154" watchObservedRunningTime="2025-12-02 18:49:39.522278725 +0000 UTC m=+810.295171083" Dec 02 18:49:39 crc kubenswrapper[4792]: I1202 18:49:39.546258 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-2lqc8" podStartSLOduration=3.512490801 podStartE2EDuration="16.54624152s" podCreationTimestamp="2025-12-02 18:49:23 +0000 UTC" firstStartedPulling="2025-12-02 18:49:24.376448769 +0000 UTC m=+795.149341097" lastFinishedPulling="2025-12-02 18:49:37.410199488 +0000 UTC m=+808.183091816" observedRunningTime="2025-12-02 18:49:39.536850299 +0000 UTC m=+810.309742637" watchObservedRunningTime="2025-12-02 18:49:39.54624152 +0000 UTC m=+810.319133858" Dec 02 18:49:43 crc kubenswrapper[4792]: I1202 18:49:43.904049 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-2lqc8" Dec 02 18:50:09 crc kubenswrapper[4792]: I1202 18:50:09.883327 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303hbhzx"] Dec 02 18:50:09 crc kubenswrapper[4792]: I1202 18:50:09.885169 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303hbhzx" Dec 02 18:50:09 crc kubenswrapper[4792]: I1202 18:50:09.890827 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 02 18:50:09 crc kubenswrapper[4792]: I1202 18:50:09.905845 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303hbhzx"] Dec 02 18:50:10 crc kubenswrapper[4792]: I1202 18:50:10.066242 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/22b8caed-5376-403f-bb65-e3a12cf9c7af-util\") pod \"142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303hbhzx\" (UID: \"22b8caed-5376-403f-bb65-e3a12cf9c7af\") " pod="openshift-marketplace/142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303hbhzx" Dec 02 18:50:10 crc kubenswrapper[4792]: I1202 18:50:10.066430 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/22b8caed-5376-403f-bb65-e3a12cf9c7af-bundle\") pod \"142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303hbhzx\" (UID: \"22b8caed-5376-403f-bb65-e3a12cf9c7af\") " pod="openshift-marketplace/142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303hbhzx" Dec 02 18:50:10 crc kubenswrapper[4792]: I1202 18:50:10.066494 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7dh9\" (UniqueName: \"kubernetes.io/projected/22b8caed-5376-403f-bb65-e3a12cf9c7af-kube-api-access-q7dh9\") pod \"142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303hbhzx\" (UID: \"22b8caed-5376-403f-bb65-e3a12cf9c7af\") " pod="openshift-marketplace/142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303hbhzx" Dec 02 18:50:10 crc kubenswrapper[4792]: I1202 18:50:10.168433 4792 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/22b8caed-5376-403f-bb65-e3a12cf9c7af-util\") pod \"142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303hbhzx\" (UID: \"22b8caed-5376-403f-bb65-e3a12cf9c7af\") " pod="openshift-marketplace/142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303hbhzx" Dec 02 18:50:10 crc kubenswrapper[4792]: I1202 18:50:10.168592 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/22b8caed-5376-403f-bb65-e3a12cf9c7af-bundle\") pod \"142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303hbhzx\" (UID: \"22b8caed-5376-403f-bb65-e3a12cf9c7af\") " pod="openshift-marketplace/142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303hbhzx" Dec 02 18:50:10 crc kubenswrapper[4792]: I1202 18:50:10.168632 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7dh9\" (UniqueName: \"kubernetes.io/projected/22b8caed-5376-403f-bb65-e3a12cf9c7af-kube-api-access-q7dh9\") pod \"142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303hbhzx\" (UID: \"22b8caed-5376-403f-bb65-e3a12cf9c7af\") " pod="openshift-marketplace/142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303hbhzx" Dec 02 18:50:10 crc kubenswrapper[4792]: I1202 18:50:10.169019 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/22b8caed-5376-403f-bb65-e3a12cf9c7af-util\") pod \"142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303hbhzx\" (UID: \"22b8caed-5376-403f-bb65-e3a12cf9c7af\") " pod="openshift-marketplace/142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303hbhzx" Dec 02 18:50:10 crc kubenswrapper[4792]: I1202 18:50:10.169336 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/22b8caed-5376-403f-bb65-e3a12cf9c7af-bundle\") pod \"142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303hbhzx\" (UID: \"22b8caed-5376-403f-bb65-e3a12cf9c7af\") " pod="openshift-marketplace/142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303hbhzx" Dec 02 18:50:10 crc kubenswrapper[4792]: I1202 18:50:10.191193 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7dh9\" (UniqueName: \"kubernetes.io/projected/22b8caed-5376-403f-bb65-e3a12cf9c7af-kube-api-access-q7dh9\") pod \"142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303hbhzx\" (UID: \"22b8caed-5376-403f-bb65-e3a12cf9c7af\") " pod="openshift-marketplace/142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303hbhzx" Dec 02 18:50:10 crc kubenswrapper[4792]: I1202 18:50:10.200640 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303hbhzx" Dec 02 18:50:10 crc kubenswrapper[4792]: I1202 18:50:10.749694 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303hbhzx"] Dec 02 18:50:11 crc kubenswrapper[4792]: I1202 18:50:11.711470 4792 generic.go:334] "Generic (PLEG): container finished" podID="22b8caed-5376-403f-bb65-e3a12cf9c7af" containerID="dcd94cbe99914e465a8aea14a84c94c22bae472fc7f07417e8438a40effe16d7" exitCode=0 Dec 02 18:50:11 crc kubenswrapper[4792]: I1202 18:50:11.711670 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303hbhzx" event={"ID":"22b8caed-5376-403f-bb65-e3a12cf9c7af","Type":"ContainerDied","Data":"dcd94cbe99914e465a8aea14a84c94c22bae472fc7f07417e8438a40effe16d7"} Dec 02 18:50:11 crc kubenswrapper[4792]: I1202 18:50:11.715977 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303hbhzx" event={"ID":"22b8caed-5376-403f-bb65-e3a12cf9c7af","Type":"ContainerStarted","Data":"e18dbd9a7951236996843c62f5104a6953c148460585237c49592407d9c160f3"} Dec 02 18:50:11 crc kubenswrapper[4792]: I1202 18:50:11.756292 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["minio-dev/minio"] Dec 02 18:50:11 crc kubenswrapper[4792]: I1202 18:50:11.757777 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="minio-dev/minio" Dec 02 18:50:11 crc kubenswrapper[4792]: I1202 18:50:11.761624 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"minio-dev"/"kube-root-ca.crt" Dec 02 18:50:11 crc kubenswrapper[4792]: I1202 18:50:11.761623 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"minio-dev"/"openshift-service-ca.crt" Dec 02 18:50:11 crc kubenswrapper[4792]: I1202 18:50:11.790674 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["minio-dev/minio"] Dec 02 18:50:11 crc kubenswrapper[4792]: I1202 18:50:11.911920 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ftfhk\" (UniqueName: \"kubernetes.io/projected/2fec0142-128b-4ada-b6bf-c520bbc4a419-kube-api-access-ftfhk\") pod \"minio\" (UID: \"2fec0142-128b-4ada-b6bf-c520bbc4a419\") " pod="minio-dev/minio" Dec 02 18:50:11 crc kubenswrapper[4792]: I1202 18:50:11.912456 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-69d92f4d-b8a4-4013-a5ac-fa7ffc3b74da\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-69d92f4d-b8a4-4013-a5ac-fa7ffc3b74da\") pod \"minio\" (UID: \"2fec0142-128b-4ada-b6bf-c520bbc4a419\") " pod="minio-dev/minio" Dec 02 18:50:12 crc kubenswrapper[4792]: I1202 18:50:12.014788 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-69d92f4d-b8a4-4013-a5ac-fa7ffc3b74da\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-69d92f4d-b8a4-4013-a5ac-fa7ffc3b74da\") pod \"minio\" (UID: \"2fec0142-128b-4ada-b6bf-c520bbc4a419\") " pod="minio-dev/minio" Dec 02 18:50:12 crc kubenswrapper[4792]: I1202 18:50:12.014953 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ftfhk\" (UniqueName: \"kubernetes.io/projected/2fec0142-128b-4ada-b6bf-c520bbc4a419-kube-api-access-ftfhk\") pod \"minio\" (UID: 
\"2fec0142-128b-4ada-b6bf-c520bbc4a419\") " pod="minio-dev/minio" Dec 02 18:50:12 crc kubenswrapper[4792]: I1202 18:50:12.019187 4792 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 02 18:50:12 crc kubenswrapper[4792]: I1202 18:50:12.019329 4792 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-69d92f4d-b8a4-4013-a5ac-fa7ffc3b74da\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-69d92f4d-b8a4-4013-a5ac-fa7ffc3b74da\") pod \"minio\" (UID: \"2fec0142-128b-4ada-b6bf-c520bbc4a419\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/18f1ec44bf303bc9c51bfe8916f9cb82764514fbb4847f7571381bc81b0e4e84/globalmount\"" pod="minio-dev/minio" Dec 02 18:50:12 crc kubenswrapper[4792]: I1202 18:50:12.046366 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ftfhk\" (UniqueName: \"kubernetes.io/projected/2fec0142-128b-4ada-b6bf-c520bbc4a419-kube-api-access-ftfhk\") pod \"minio\" (UID: \"2fec0142-128b-4ada-b6bf-c520bbc4a419\") " pod="minio-dev/minio" Dec 02 18:50:12 crc kubenswrapper[4792]: I1202 18:50:12.072724 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-69d92f4d-b8a4-4013-a5ac-fa7ffc3b74da\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-69d92f4d-b8a4-4013-a5ac-fa7ffc3b74da\") pod \"minio\" (UID: \"2fec0142-128b-4ada-b6bf-c520bbc4a419\") " pod="minio-dev/minio" Dec 02 18:50:12 crc kubenswrapper[4792]: I1202 18:50:12.100113 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="minio-dev/minio" Dec 02 18:50:12 crc kubenswrapper[4792]: I1202 18:50:12.252231 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-2cck7"] Dec 02 18:50:12 crc kubenswrapper[4792]: I1202 18:50:12.253818 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-2cck7" Dec 02 18:50:12 crc kubenswrapper[4792]: I1202 18:50:12.272694 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2cck7"] Dec 02 18:50:12 crc kubenswrapper[4792]: I1202 18:50:12.421640 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2971010-22cd-41d1-ad4c-663fe1f1f31b-utilities\") pod \"redhat-operators-2cck7\" (UID: \"a2971010-22cd-41d1-ad4c-663fe1f1f31b\") " pod="openshift-marketplace/redhat-operators-2cck7" Dec 02 18:50:12 crc kubenswrapper[4792]: I1202 18:50:12.421712 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2971010-22cd-41d1-ad4c-663fe1f1f31b-catalog-content\") pod \"redhat-operators-2cck7\" (UID: \"a2971010-22cd-41d1-ad4c-663fe1f1f31b\") " pod="openshift-marketplace/redhat-operators-2cck7" Dec 02 18:50:12 crc kubenswrapper[4792]: I1202 18:50:12.421746 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jkttn\" (UniqueName: \"kubernetes.io/projected/a2971010-22cd-41d1-ad4c-663fe1f1f31b-kube-api-access-jkttn\") pod \"redhat-operators-2cck7\" (UID: \"a2971010-22cd-41d1-ad4c-663fe1f1f31b\") " pod="openshift-marketplace/redhat-operators-2cck7" Dec 02 18:50:12 crc kubenswrapper[4792]: I1202 18:50:12.522945 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2971010-22cd-41d1-ad4c-663fe1f1f31b-utilities\") pod \"redhat-operators-2cck7\" (UID: \"a2971010-22cd-41d1-ad4c-663fe1f1f31b\") " pod="openshift-marketplace/redhat-operators-2cck7" Dec 02 18:50:12 crc kubenswrapper[4792]: I1202 18:50:12.523403 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2971010-22cd-41d1-ad4c-663fe1f1f31b-catalog-content\") pod \"redhat-operators-2cck7\" (UID: \"a2971010-22cd-41d1-ad4c-663fe1f1f31b\") " pod="openshift-marketplace/redhat-operators-2cck7" Dec 02 18:50:12 crc kubenswrapper[4792]: I1202 18:50:12.523433 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jkttn\" (UniqueName: \"kubernetes.io/projected/a2971010-22cd-41d1-ad4c-663fe1f1f31b-kube-api-access-jkttn\") pod \"redhat-operators-2cck7\" (UID: \"a2971010-22cd-41d1-ad4c-663fe1f1f31b\") " pod="openshift-marketplace/redhat-operators-2cck7" Dec 02 18:50:12 crc kubenswrapper[4792]: I1202 18:50:12.523680 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2971010-22cd-41d1-ad4c-663fe1f1f31b-utilities\") pod \"redhat-operators-2cck7\" (UID: \"a2971010-22cd-41d1-ad4c-663fe1f1f31b\") " pod="openshift-marketplace/redhat-operators-2cck7" Dec 02 18:50:12 crc kubenswrapper[4792]: I1202 18:50:12.523978 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2971010-22cd-41d1-ad4c-663fe1f1f31b-catalog-content\") pod \"redhat-operators-2cck7\" (UID: \"a2971010-22cd-41d1-ad4c-663fe1f1f31b\") " pod="openshift-marketplace/redhat-operators-2cck7" Dec 02 18:50:12 crc kubenswrapper[4792]: I1202 18:50:12.551151 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-jkttn\" (UniqueName: \"kubernetes.io/projected/a2971010-22cd-41d1-ad4c-663fe1f1f31b-kube-api-access-jkttn\") pod \"redhat-operators-2cck7\" (UID: \"a2971010-22cd-41d1-ad4c-663fe1f1f31b\") " pod="openshift-marketplace/redhat-operators-2cck7" Dec 02 18:50:12 crc kubenswrapper[4792]: I1202 18:50:12.585077 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2cck7" Dec 02 18:50:12 crc kubenswrapper[4792]: I1202 18:50:12.622275 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["minio-dev/minio"] Dec 02 18:50:12 crc kubenswrapper[4792]: W1202 18:50:12.641680 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2fec0142_128b_4ada_b6bf_c520bbc4a419.slice/crio-9de965d697d03dbab307baba9bd638c83fbbde7eefb37cdf0b1e54f702b52824 WatchSource:0}: Error finding container 9de965d697d03dbab307baba9bd638c83fbbde7eefb37cdf0b1e54f702b52824: Status 404 returned error can't find the container with id 9de965d697d03dbab307baba9bd638c83fbbde7eefb37cdf0b1e54f702b52824 Dec 02 18:50:12 crc kubenswrapper[4792]: I1202 18:50:12.727805 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303hbhzx" event={"ID":"22b8caed-5376-403f-bb65-e3a12cf9c7af","Type":"ContainerStarted","Data":"75ba0f5e144ac7b134df76a126563e6d19e546b7c285a545211ba9925f20b277"} Dec 02 18:50:12 crc kubenswrapper[4792]: I1202 18:50:12.734968 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="minio-dev/minio" event={"ID":"2fec0142-128b-4ada-b6bf-c520bbc4a419","Type":"ContainerStarted","Data":"9de965d697d03dbab307baba9bd638c83fbbde7eefb37cdf0b1e54f702b52824"} Dec 02 18:50:13 crc kubenswrapper[4792]: I1202 18:50:13.006449 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2cck7"] Dec 02 18:50:13 crc kubenswrapper[4792]: W1202 18:50:13.015400 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda2971010_22cd_41d1_ad4c_663fe1f1f31b.slice/crio-d9879b5491f71c4dfd7881811fecbccb2b02eb6dc4d03d142ebb84cde90ab3f0 WatchSource:0}: Error finding container d9879b5491f71c4dfd7881811fecbccb2b02eb6dc4d03d142ebb84cde90ab3f0: Status 404 returned error can't find the container with id d9879b5491f71c4dfd7881811fecbccb2b02eb6dc4d03d142ebb84cde90ab3f0 Dec 02 18:50:13 crc kubenswrapper[4792]: I1202 18:50:13.744376 4792 generic.go:334] "Generic (PLEG): container finished" podID="22b8caed-5376-403f-bb65-e3a12cf9c7af" containerID="75ba0f5e144ac7b134df76a126563e6d19e546b7c285a545211ba9925f20b277" exitCode=0 Dec 02 18:50:13 crc kubenswrapper[4792]: I1202 18:50:13.744578 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303hbhzx" event={"ID":"22b8caed-5376-403f-bb65-e3a12cf9c7af","Type":"ContainerDied","Data":"75ba0f5e144ac7b134df76a126563e6d19e546b7c285a545211ba9925f20b277"} Dec 02 18:50:13 crc kubenswrapper[4792]: I1202 18:50:13.746772 4792 generic.go:334] "Generic (PLEG): container finished" podID="a2971010-22cd-41d1-ad4c-663fe1f1f31b" containerID="ef6419ded340f528047a7c1c341e780dd094b828bf736d6d403589ce838bcbc2" exitCode=0 Dec 02 18:50:13 crc kubenswrapper[4792]: I1202 18:50:13.746802 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2cck7" 
event={"ID":"a2971010-22cd-41d1-ad4c-663fe1f1f31b","Type":"ContainerDied","Data":"ef6419ded340f528047a7c1c341e780dd094b828bf736d6d403589ce838bcbc2"} Dec 02 18:50:13 crc kubenswrapper[4792]: I1202 18:50:13.746822 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2cck7" event={"ID":"a2971010-22cd-41d1-ad4c-663fe1f1f31b","Type":"ContainerStarted","Data":"d9879b5491f71c4dfd7881811fecbccb2b02eb6dc4d03d142ebb84cde90ab3f0"} Dec 02 18:50:17 crc kubenswrapper[4792]: I1202 18:50:17.779976 4792 generic.go:334] "Generic (PLEG): container finished" podID="22b8caed-5376-403f-bb65-e3a12cf9c7af" containerID="e2b7dc9a259945bc1484e3648a4f9424a89059c8a25aa6ffecbcdb203815b1e1" exitCode=0 Dec 02 18:50:17 crc kubenswrapper[4792]: I1202 18:50:17.780093 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303hbhzx" event={"ID":"22b8caed-5376-403f-bb65-e3a12cf9c7af","Type":"ContainerDied","Data":"e2b7dc9a259945bc1484e3648a4f9424a89059c8a25aa6ffecbcdb203815b1e1"} Dec 02 18:50:17 crc kubenswrapper[4792]: I1202 18:50:17.783502 4792 generic.go:334] "Generic (PLEG): container finished" podID="a2971010-22cd-41d1-ad4c-663fe1f1f31b" containerID="60e6ca1d6813a5623201c6c321fe1611d87ca6d00334fad3712ed5a1d358864c" exitCode=0 Dec 02 18:50:17 crc kubenswrapper[4792]: I1202 18:50:17.783651 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2cck7" event={"ID":"a2971010-22cd-41d1-ad4c-663fe1f1f31b","Type":"ContainerDied","Data":"60e6ca1d6813a5623201c6c321fe1611d87ca6d00334fad3712ed5a1d358864c"} Dec 02 18:50:17 crc kubenswrapper[4792]: I1202 18:50:17.785573 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="minio-dev/minio" event={"ID":"2fec0142-128b-4ada-b6bf-c520bbc4a419","Type":"ContainerStarted","Data":"61dfa20890a9eb3e7028bd797d356b58cb9be721808692e3e6edc71be7feff10"} Dec 02 18:50:17 crc kubenswrapper[4792]: I1202 18:50:17.848126 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="minio-dev/minio" podStartSLOduration=4.593867167 podStartE2EDuration="8.848098049s" podCreationTimestamp="2025-12-02 18:50:09 +0000 UTC" firstStartedPulling="2025-12-02 18:50:12.66116929 +0000 UTC m=+843.434061618" lastFinishedPulling="2025-12-02 18:50:16.915400172 +0000 UTC m=+847.688292500" observedRunningTime="2025-12-02 18:50:17.844413614 +0000 UTC m=+848.617305952" watchObservedRunningTime="2025-12-02 18:50:17.848098049 +0000 UTC m=+848.620990417" Dec 02 18:50:18 crc kubenswrapper[4792]: I1202 18:50:18.447188 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-x8tcx"] Dec 02 18:50:18 crc kubenswrapper[4792]: I1202 18:50:18.450490 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-x8tcx" Dec 02 18:50:18 crc kubenswrapper[4792]: I1202 18:50:18.463382 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-x8tcx"] Dec 02 18:50:18 crc kubenswrapper[4792]: I1202 18:50:18.648707 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b941b5a-8a0d-47f8-a37a-9287576ea270-catalog-content\") pod \"community-operators-x8tcx\" (UID: \"3b941b5a-8a0d-47f8-a37a-9287576ea270\") " pod="openshift-marketplace/community-operators-x8tcx" Dec 02 18:50:18 crc kubenswrapper[4792]: I1202 18:50:18.648814 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sjpb4\" (UniqueName: \"kubernetes.io/projected/3b941b5a-8a0d-47f8-a37a-9287576ea270-kube-api-access-sjpb4\") pod \"community-operators-x8tcx\" (UID: \"3b941b5a-8a0d-47f8-a37a-9287576ea270\") " pod="openshift-marketplace/community-operators-x8tcx" Dec 02 18:50:18 crc kubenswrapper[4792]: I1202 18:50:18.648842 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b941b5a-8a0d-47f8-a37a-9287576ea270-utilities\") pod \"community-operators-x8tcx\" (UID: \"3b941b5a-8a0d-47f8-a37a-9287576ea270\") " pod="openshift-marketplace/community-operators-x8tcx" Dec 02 18:50:18 crc kubenswrapper[4792]: I1202 18:50:18.751097 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b941b5a-8a0d-47f8-a37a-9287576ea270-catalog-content\") pod \"community-operators-x8tcx\" (UID: \"3b941b5a-8a0d-47f8-a37a-9287576ea270\") " pod="openshift-marketplace/community-operators-x8tcx" Dec 02 18:50:18 crc kubenswrapper[4792]: I1202 18:50:18.751249 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b941b5a-8a0d-47f8-a37a-9287576ea270-utilities\") pod \"community-operators-x8tcx\" (UID: \"3b941b5a-8a0d-47f8-a37a-9287576ea270\") " pod="openshift-marketplace/community-operators-x8tcx" Dec 02 18:50:18 crc kubenswrapper[4792]: I1202 18:50:18.751274 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sjpb4\" (UniqueName: \"kubernetes.io/projected/3b941b5a-8a0d-47f8-a37a-9287576ea270-kube-api-access-sjpb4\") pod \"community-operators-x8tcx\" (UID: \"3b941b5a-8a0d-47f8-a37a-9287576ea270\") " pod="openshift-marketplace/community-operators-x8tcx" Dec 02 18:50:18 crc kubenswrapper[4792]: I1202 18:50:18.752125 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b941b5a-8a0d-47f8-a37a-9287576ea270-utilities\") pod \"community-operators-x8tcx\" (UID: \"3b941b5a-8a0d-47f8-a37a-9287576ea270\") " pod="openshift-marketplace/community-operators-x8tcx" Dec 02 18:50:18 crc kubenswrapper[4792]: I1202 18:50:18.752216 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b941b5a-8a0d-47f8-a37a-9287576ea270-catalog-content\") pod \"community-operators-x8tcx\" (UID: \"3b941b5a-8a0d-47f8-a37a-9287576ea270\") " pod="openshift-marketplace/community-operators-x8tcx" Dec 02 18:50:18 crc kubenswrapper[4792]: I1202 18:50:18.790261 4792 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-sjpb4\" (UniqueName: \"kubernetes.io/projected/3b941b5a-8a0d-47f8-a37a-9287576ea270-kube-api-access-sjpb4\") pod \"community-operators-x8tcx\" (UID: \"3b941b5a-8a0d-47f8-a37a-9287576ea270\") " pod="openshift-marketplace/community-operators-x8tcx" Dec 02 18:50:18 crc kubenswrapper[4792]: I1202 18:50:18.803875 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2cck7" event={"ID":"a2971010-22cd-41d1-ad4c-663fe1f1f31b","Type":"ContainerStarted","Data":"78b1117f06cac79e812bbd82577ab219a25d57c80b6358f578df412c8113a075"} Dec 02 18:50:18 crc kubenswrapper[4792]: I1202 18:50:18.826337 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-2cck7" podStartSLOduration=2.855502547 podStartE2EDuration="6.826316354s" podCreationTimestamp="2025-12-02 18:50:12 +0000 UTC" firstStartedPulling="2025-12-02 18:50:14.227695718 +0000 UTC m=+845.000588046" lastFinishedPulling="2025-12-02 18:50:18.198509525 +0000 UTC m=+848.971401853" observedRunningTime="2025-12-02 18:50:18.822386563 +0000 UTC m=+849.595278911" watchObservedRunningTime="2025-12-02 18:50:18.826316354 +0000 UTC m=+849.599208692" Dec 02 18:50:19 crc kubenswrapper[4792]: I1202 18:50:19.053446 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303hbhzx" Dec 02 18:50:19 crc kubenswrapper[4792]: I1202 18:50:19.078814 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-x8tcx" Dec 02 18:50:19 crc kubenswrapper[4792]: I1202 18:50:19.156123 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/22b8caed-5376-403f-bb65-e3a12cf9c7af-util\") pod \"22b8caed-5376-403f-bb65-e3a12cf9c7af\" (UID: \"22b8caed-5376-403f-bb65-e3a12cf9c7af\") " Dec 02 18:50:19 crc kubenswrapper[4792]: I1202 18:50:19.156365 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/22b8caed-5376-403f-bb65-e3a12cf9c7af-bundle\") pod \"22b8caed-5376-403f-bb65-e3a12cf9c7af\" (UID: \"22b8caed-5376-403f-bb65-e3a12cf9c7af\") " Dec 02 18:50:19 crc kubenswrapper[4792]: I1202 18:50:19.156411 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q7dh9\" (UniqueName: \"kubernetes.io/projected/22b8caed-5376-403f-bb65-e3a12cf9c7af-kube-api-access-q7dh9\") pod \"22b8caed-5376-403f-bb65-e3a12cf9c7af\" (UID: \"22b8caed-5376-403f-bb65-e3a12cf9c7af\") " Dec 02 18:50:19 crc kubenswrapper[4792]: I1202 18:50:19.157201 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/22b8caed-5376-403f-bb65-e3a12cf9c7af-bundle" (OuterVolumeSpecName: "bundle") pod "22b8caed-5376-403f-bb65-e3a12cf9c7af" (UID: "22b8caed-5376-403f-bb65-e3a12cf9c7af"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:50:19 crc kubenswrapper[4792]: I1202 18:50:19.163699 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22b8caed-5376-403f-bb65-e3a12cf9c7af-kube-api-access-q7dh9" (OuterVolumeSpecName: "kube-api-access-q7dh9") pod "22b8caed-5376-403f-bb65-e3a12cf9c7af" (UID: "22b8caed-5376-403f-bb65-e3a12cf9c7af"). InnerVolumeSpecName "kube-api-access-q7dh9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:50:19 crc kubenswrapper[4792]: I1202 18:50:19.172055 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/22b8caed-5376-403f-bb65-e3a12cf9c7af-util" (OuterVolumeSpecName: "util") pod "22b8caed-5376-403f-bb65-e3a12cf9c7af" (UID: "22b8caed-5376-403f-bb65-e3a12cf9c7af"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:50:19 crc kubenswrapper[4792]: I1202 18:50:19.257909 4792 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/22b8caed-5376-403f-bb65-e3a12cf9c7af-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 18:50:19 crc kubenswrapper[4792]: I1202 18:50:19.257947 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q7dh9\" (UniqueName: \"kubernetes.io/projected/22b8caed-5376-403f-bb65-e3a12cf9c7af-kube-api-access-q7dh9\") on node \"crc\" DevicePath \"\"" Dec 02 18:50:19 crc kubenswrapper[4792]: I1202 18:50:19.257961 4792 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/22b8caed-5376-403f-bb65-e3a12cf9c7af-util\") on node \"crc\" DevicePath \"\"" Dec 02 18:50:19 crc kubenswrapper[4792]: I1202 18:50:19.648426 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-x8tcx"] Dec 02 18:50:19 crc kubenswrapper[4792]: I1202 18:50:19.808946 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303hbhzx" event={"ID":"22b8caed-5376-403f-bb65-e3a12cf9c7af","Type":"ContainerDied","Data":"e18dbd9a7951236996843c62f5104a6953c148460585237c49592407d9c160f3"} Dec 02 18:50:19 crc kubenswrapper[4792]: I1202 18:50:19.808999 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e18dbd9a7951236996843c62f5104a6953c148460585237c49592407d9c160f3" Dec 02 18:50:19 crc kubenswrapper[4792]: I1202 18:50:19.809039 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303hbhzx" Dec 02 18:50:19 crc kubenswrapper[4792]: I1202 18:50:19.810943 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x8tcx" event={"ID":"3b941b5a-8a0d-47f8-a37a-9287576ea270","Type":"ContainerStarted","Data":"d4d7034ce21a0e174c711dc82be897995eb308dcf0a327dcfc7a0cd82c6c24c7"} Dec 02 18:50:19 crc kubenswrapper[4792]: I1202 18:50:19.811020 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x8tcx" event={"ID":"3b941b5a-8a0d-47f8-a37a-9287576ea270","Type":"ContainerStarted","Data":"29888a6d0cd07967d6cb0212c4f684e1e65c89dbdb79c2fc6707917cd8fcf7d7"} Dec 02 18:50:20 crc kubenswrapper[4792]: I1202 18:50:20.817764 4792 generic.go:334] "Generic (PLEG): container finished" podID="3b941b5a-8a0d-47f8-a37a-9287576ea270" containerID="d4d7034ce21a0e174c711dc82be897995eb308dcf0a327dcfc7a0cd82c6c24c7" exitCode=0 Dec 02 18:50:20 crc kubenswrapper[4792]: I1202 18:50:20.817841 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x8tcx" event={"ID":"3b941b5a-8a0d-47f8-a37a-9287576ea270","Type":"ContainerDied","Data":"d4d7034ce21a0e174c711dc82be897995eb308dcf0a327dcfc7a0cd82c6c24c7"} Dec 02 18:50:21 crc kubenswrapper[4792]: I1202 18:50:21.845731 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x8tcx" event={"ID":"3b941b5a-8a0d-47f8-a37a-9287576ea270","Type":"ContainerStarted","Data":"6085390b6422ed40da3a0ae68746d0518e6bf8bcd2f8886c74a1f87363c1d702"} Dec 02 18:50:22 crc kubenswrapper[4792]: I1202 18:50:22.586038 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-2cck7" Dec 02 18:50:22 crc kubenswrapper[4792]: I1202 18:50:22.586420 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-2cck7" Dec 02 18:50:22 crc kubenswrapper[4792]: I1202 18:50:22.853156 4792 generic.go:334] "Generic (PLEG): container finished" podID="3b941b5a-8a0d-47f8-a37a-9287576ea270" containerID="6085390b6422ed40da3a0ae68746d0518e6bf8bcd2f8886c74a1f87363c1d702" exitCode=0 Dec 02 18:50:22 crc kubenswrapper[4792]: I1202 18:50:22.853206 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x8tcx" event={"ID":"3b941b5a-8a0d-47f8-a37a-9287576ea270","Type":"ContainerDied","Data":"6085390b6422ed40da3a0ae68746d0518e6bf8bcd2f8886c74a1f87363c1d702"} Dec 02 18:50:23 crc kubenswrapper[4792]: I1202 18:50:23.687620 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-2cck7" podUID="a2971010-22cd-41d1-ad4c-663fe1f1f31b" containerName="registry-server" probeResult="failure" output=< Dec 02 18:50:23 crc kubenswrapper[4792]: timeout: failed to connect service ":50051" within 1s Dec 02 18:50:23 crc kubenswrapper[4792]: > Dec 02 18:50:24 crc kubenswrapper[4792]: I1202 18:50:24.874922 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x8tcx" event={"ID":"3b941b5a-8a0d-47f8-a37a-9287576ea270","Type":"ContainerStarted","Data":"ae1a3fe4c147307f3d4f9a23bc2ba1e6d1139337930ae2ace4c5fa9deec4f310"} Dec 02 18:50:24 crc kubenswrapper[4792]: I1202 18:50:24.895870 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-x8tcx" 
podStartSLOduration=3.4808644060000002 podStartE2EDuration="6.895841973s" podCreationTimestamp="2025-12-02 18:50:18 +0000 UTC" firstStartedPulling="2025-12-02 18:50:20.820314047 +0000 UTC m=+851.593206375" lastFinishedPulling="2025-12-02 18:50:24.235291614 +0000 UTC m=+855.008183942" observedRunningTime="2025-12-02 18:50:24.894137759 +0000 UTC m=+855.667030097" watchObservedRunningTime="2025-12-02 18:50:24.895841973 +0000 UTC m=+855.668734341" Dec 02 18:50:29 crc kubenswrapper[4792]: I1202 18:50:29.079832 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-x8tcx" Dec 02 18:50:29 crc kubenswrapper[4792]: I1202 18:50:29.080326 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-x8tcx" Dec 02 18:50:29 crc kubenswrapper[4792]: I1202 18:50:29.126778 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-x8tcx" Dec 02 18:50:29 crc kubenswrapper[4792]: I1202 18:50:29.990300 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-x8tcx" Dec 02 18:50:31 crc kubenswrapper[4792]: I1202 18:50:31.432938 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-x8tcx"] Dec 02 18:50:31 crc kubenswrapper[4792]: I1202 18:50:31.921915 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-x8tcx" podUID="3b941b5a-8a0d-47f8-a37a-9287576ea270" containerName="registry-server" containerID="cri-o://ae1a3fe4c147307f3d4f9a23bc2ba1e6d1139337930ae2ace4c5fa9deec4f310" gracePeriod=2 Dec 02 18:50:32 crc kubenswrapper[4792]: I1202 18:50:32.321213 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-x8tcx" Dec 02 18:50:32 crc kubenswrapper[4792]: I1202 18:50:32.351028 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wvqmw"] Dec 02 18:50:32 crc kubenswrapper[4792]: E1202 18:50:32.351260 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22b8caed-5376-403f-bb65-e3a12cf9c7af" containerName="pull" Dec 02 18:50:32 crc kubenswrapper[4792]: I1202 18:50:32.351273 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="22b8caed-5376-403f-bb65-e3a12cf9c7af" containerName="pull" Dec 02 18:50:32 crc kubenswrapper[4792]: E1202 18:50:32.351280 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b941b5a-8a0d-47f8-a37a-9287576ea270" containerName="extract-content" Dec 02 18:50:32 crc kubenswrapper[4792]: I1202 18:50:32.351286 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b941b5a-8a0d-47f8-a37a-9287576ea270" containerName="extract-content" Dec 02 18:50:32 crc kubenswrapper[4792]: E1202 18:50:32.351298 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22b8caed-5376-403f-bb65-e3a12cf9c7af" containerName="util" Dec 02 18:50:32 crc kubenswrapper[4792]: I1202 18:50:32.351303 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="22b8caed-5376-403f-bb65-e3a12cf9c7af" containerName="util" Dec 02 18:50:32 crc kubenswrapper[4792]: E1202 18:50:32.351319 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b941b5a-8a0d-47f8-a37a-9287576ea270" containerName="registry-server" Dec 02 18:50:32 crc kubenswrapper[4792]: I1202 18:50:32.351324 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b941b5a-8a0d-47f8-a37a-9287576ea270" containerName="registry-server" Dec 02 18:50:32 crc kubenswrapper[4792]: E1202 18:50:32.351334 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22b8caed-5376-403f-bb65-e3a12cf9c7af" containerName="extract" Dec 02 18:50:32 crc kubenswrapper[4792]: I1202 18:50:32.351342 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="22b8caed-5376-403f-bb65-e3a12cf9c7af" containerName="extract" Dec 02 18:50:32 crc kubenswrapper[4792]: E1202 18:50:32.351353 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b941b5a-8a0d-47f8-a37a-9287576ea270" containerName="extract-utilities" Dec 02 18:50:32 crc kubenswrapper[4792]: I1202 18:50:32.351359 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b941b5a-8a0d-47f8-a37a-9287576ea270" containerName="extract-utilities" Dec 02 18:50:32 crc kubenswrapper[4792]: I1202 18:50:32.351447 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b941b5a-8a0d-47f8-a37a-9287576ea270" containerName="registry-server" Dec 02 18:50:32 crc kubenswrapper[4792]: I1202 18:50:32.351459 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="22b8caed-5376-403f-bb65-e3a12cf9c7af" containerName="extract" Dec 02 18:50:32 crc kubenswrapper[4792]: I1202 18:50:32.352207 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wvqmw" Dec 02 18:50:32 crc kubenswrapper[4792]: I1202 18:50:32.354084 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 02 18:50:32 crc kubenswrapper[4792]: I1202 18:50:32.409433 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/19402c55-9b6f-4486-a3ce-e6971e5da081-util\") pod \"03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wvqmw\" (UID: \"19402c55-9b6f-4486-a3ce-e6971e5da081\") " pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wvqmw" Dec 02 18:50:32 crc kubenswrapper[4792]: I1202 18:50:32.409632 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/19402c55-9b6f-4486-a3ce-e6971e5da081-bundle\") pod \"03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wvqmw\" (UID: \"19402c55-9b6f-4486-a3ce-e6971e5da081\") " pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wvqmw" Dec 02 18:50:32 crc kubenswrapper[4792]: I1202 18:50:32.409743 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wvqmw"] Dec 02 18:50:32 crc kubenswrapper[4792]: I1202 18:50:32.409746 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6w6tr\" (UniqueName: \"kubernetes.io/projected/19402c55-9b6f-4486-a3ce-e6971e5da081-kube-api-access-6w6tr\") pod \"03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wvqmw\" (UID: \"19402c55-9b6f-4486-a3ce-e6971e5da081\") " pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wvqmw" Dec 02 18:50:32 crc kubenswrapper[4792]: I1202 18:50:32.510125 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b941b5a-8a0d-47f8-a37a-9287576ea270-catalog-content\") pod \"3b941b5a-8a0d-47f8-a37a-9287576ea270\" (UID: \"3b941b5a-8a0d-47f8-a37a-9287576ea270\") " Dec 02 18:50:32 crc kubenswrapper[4792]: I1202 18:50:32.510178 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b941b5a-8a0d-47f8-a37a-9287576ea270-utilities\") pod \"3b941b5a-8a0d-47f8-a37a-9287576ea270\" (UID: \"3b941b5a-8a0d-47f8-a37a-9287576ea270\") " Dec 02 18:50:32 crc kubenswrapper[4792]: I1202 18:50:32.510202 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sjpb4\" (UniqueName: \"kubernetes.io/projected/3b941b5a-8a0d-47f8-a37a-9287576ea270-kube-api-access-sjpb4\") pod \"3b941b5a-8a0d-47f8-a37a-9287576ea270\" (UID: \"3b941b5a-8a0d-47f8-a37a-9287576ea270\") " Dec 02 18:50:32 crc kubenswrapper[4792]: I1202 18:50:32.510317 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/19402c55-9b6f-4486-a3ce-e6971e5da081-bundle\") pod \"03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wvqmw\" (UID: \"19402c55-9b6f-4486-a3ce-e6971e5da081\") " pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wvqmw" Dec 02 18:50:32 crc kubenswrapper[4792]: I1202 18:50:32.510357 4792 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6w6tr\" (UniqueName: \"kubernetes.io/projected/19402c55-9b6f-4486-a3ce-e6971e5da081-kube-api-access-6w6tr\") pod \"03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wvqmw\" (UID: \"19402c55-9b6f-4486-a3ce-e6971e5da081\") " pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wvqmw" Dec 02 18:50:32 crc kubenswrapper[4792]: I1202 18:50:32.510406 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/19402c55-9b6f-4486-a3ce-e6971e5da081-util\") pod \"03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wvqmw\" (UID: \"19402c55-9b6f-4486-a3ce-e6971e5da081\") " pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wvqmw" Dec 02 18:50:32 crc kubenswrapper[4792]: I1202 18:50:32.510819 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/19402c55-9b6f-4486-a3ce-e6971e5da081-util\") pod \"03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wvqmw\" (UID: \"19402c55-9b6f-4486-a3ce-e6971e5da081\") " pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wvqmw" Dec 02 18:50:32 crc kubenswrapper[4792]: I1202 18:50:32.510961 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/19402c55-9b6f-4486-a3ce-e6971e5da081-bundle\") pod \"03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wvqmw\" (UID: \"19402c55-9b6f-4486-a3ce-e6971e5da081\") " pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wvqmw" Dec 02 18:50:32 crc kubenswrapper[4792]: I1202 18:50:32.511338 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3b941b5a-8a0d-47f8-a37a-9287576ea270-utilities" (OuterVolumeSpecName: "utilities") pod "3b941b5a-8a0d-47f8-a37a-9287576ea270" (UID: "3b941b5a-8a0d-47f8-a37a-9287576ea270"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:50:32 crc kubenswrapper[4792]: I1202 18:50:32.518743 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b941b5a-8a0d-47f8-a37a-9287576ea270-kube-api-access-sjpb4" (OuterVolumeSpecName: "kube-api-access-sjpb4") pod "3b941b5a-8a0d-47f8-a37a-9287576ea270" (UID: "3b941b5a-8a0d-47f8-a37a-9287576ea270"). InnerVolumeSpecName "kube-api-access-sjpb4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:50:32 crc kubenswrapper[4792]: I1202 18:50:32.534212 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6w6tr\" (UniqueName: \"kubernetes.io/projected/19402c55-9b6f-4486-a3ce-e6971e5da081-kube-api-access-6w6tr\") pod \"03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wvqmw\" (UID: \"19402c55-9b6f-4486-a3ce-e6971e5da081\") " pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wvqmw" Dec 02 18:50:32 crc kubenswrapper[4792]: I1202 18:50:32.579857 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3b941b5a-8a0d-47f8-a37a-9287576ea270-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3b941b5a-8a0d-47f8-a37a-9287576ea270" (UID: "3b941b5a-8a0d-47f8-a37a-9287576ea270"). InnerVolumeSpecName "catalog-content". 
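[Annotation] The reconciler_common.go lines above interleave VerifyControllerAttachedVolume/MountVolume operations for the incoming bundle pod (UID 19402c55-...) with UnmountVolume operations for the departing community-operators-x8tcx pod (UID 3b941b5a-...). Both flows fall out of one desired-state-versus-actual-state comparison. The sketch below is a toy reduction of that pattern, assuming plain string-keyed sets in place of the kubelet volume manager's real types; reconcile is a hypothetical helper, not kubelet code.

package main

import "fmt"

// reconcile is a toy version of the desired-vs-actual comparison driving
// the reconciler_common.go entries above: volumes that should be mounted
// but are not get a MountVolume operation; volumes that are mounted but
// no longer wanted get an UnmountVolume operation.
func reconcile(desired, actual map[string]bool) {
	for v := range desired {
		if !actual[v] {
			fmt.Printf("operationExecutor.MountVolume started for volume %q\n", v)
			actual[v] = true // stands in for MountVolume.SetUp succeeding
		}
	}
	for v := range actual {
		if !desired[v] {
			fmt.Printf("operationExecutor.UnmountVolume started for volume %q\n", v)
			delete(actual, v) // stands in for UnmountVolume.TearDown succeeding
		}
	}
}

func main() {
	// Incoming bundle-pod volumes vs. volumes still mounted for the
	// departing community-operators-x8tcx pod, as in the entries above.
	desired := map[string]bool{"bundle": true, "util": true, "kube-api-access-6w6tr": true}
	actual := map[string]bool{"catalog-content": true, "utilities": true, "kube-api-access-sjpb4": true}
	reconcile(desired, actual)
}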
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:50:32 crc kubenswrapper[4792]: I1202 18:50:32.611209 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b941b5a-8a0d-47f8-a37a-9287576ea270-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 18:50:32 crc kubenswrapper[4792]: I1202 18:50:32.611260 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b941b5a-8a0d-47f8-a37a-9287576ea270-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 18:50:32 crc kubenswrapper[4792]: I1202 18:50:32.611378 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sjpb4\" (UniqueName: \"kubernetes.io/projected/3b941b5a-8a0d-47f8-a37a-9287576ea270-kube-api-access-sjpb4\") on node \"crc\" DevicePath \"\"" Dec 02 18:50:32 crc kubenswrapper[4792]: I1202 18:50:32.633435 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-2cck7" Dec 02 18:50:32 crc kubenswrapper[4792]: I1202 18:50:32.667618 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wvqmw" Dec 02 18:50:32 crc kubenswrapper[4792]: I1202 18:50:32.677956 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-2cck7" Dec 02 18:50:32 crc kubenswrapper[4792]: I1202 18:50:32.912130 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wvqmw"] Dec 02 18:50:32 crc kubenswrapper[4792]: W1202 18:50:32.913831 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod19402c55_9b6f_4486_a3ce_e6971e5da081.slice/crio-841521215c9f3abd7130472b65db578bb59589b44c1672c93e66c25f8003649d WatchSource:0}: Error finding container 841521215c9f3abd7130472b65db578bb59589b44c1672c93e66c25f8003649d: Status 404 returned error can't find the container with id 841521215c9f3abd7130472b65db578bb59589b44c1672c93e66c25f8003649d Dec 02 18:50:32 crc kubenswrapper[4792]: I1202 18:50:32.934387 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wvqmw" event={"ID":"19402c55-9b6f-4486-a3ce-e6971e5da081","Type":"ContainerStarted","Data":"841521215c9f3abd7130472b65db578bb59589b44c1672c93e66c25f8003649d"} Dec 02 18:50:32 crc kubenswrapper[4792]: I1202 18:50:32.936654 4792 generic.go:334] "Generic (PLEG): container finished" podID="3b941b5a-8a0d-47f8-a37a-9287576ea270" containerID="ae1a3fe4c147307f3d4f9a23bc2ba1e6d1139337930ae2ace4c5fa9deec4f310" exitCode=0 Dec 02 18:50:32 crc kubenswrapper[4792]: I1202 18:50:32.936725 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-x8tcx" Dec 02 18:50:32 crc kubenswrapper[4792]: I1202 18:50:32.936715 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x8tcx" event={"ID":"3b941b5a-8a0d-47f8-a37a-9287576ea270","Type":"ContainerDied","Data":"ae1a3fe4c147307f3d4f9a23bc2ba1e6d1139337930ae2ace4c5fa9deec4f310"} Dec 02 18:50:32 crc kubenswrapper[4792]: I1202 18:50:32.936783 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x8tcx" event={"ID":"3b941b5a-8a0d-47f8-a37a-9287576ea270","Type":"ContainerDied","Data":"29888a6d0cd07967d6cb0212c4f684e1e65c89dbdb79c2fc6707917cd8fcf7d7"} Dec 02 18:50:32 crc kubenswrapper[4792]: I1202 18:50:32.936811 4792 scope.go:117] "RemoveContainer" containerID="ae1a3fe4c147307f3d4f9a23bc2ba1e6d1139337930ae2ace4c5fa9deec4f310" Dec 02 18:50:32 crc kubenswrapper[4792]: I1202 18:50:32.971721 4792 scope.go:117] "RemoveContainer" containerID="6085390b6422ed40da3a0ae68746d0518e6bf8bcd2f8886c74a1f87363c1d702" Dec 02 18:50:32 crc kubenswrapper[4792]: I1202 18:50:32.974780 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-x8tcx"] Dec 02 18:50:32 crc kubenswrapper[4792]: I1202 18:50:32.979600 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-x8tcx"] Dec 02 18:50:32 crc kubenswrapper[4792]: I1202 18:50:32.991344 4792 scope.go:117] "RemoveContainer" containerID="d4d7034ce21a0e174c711dc82be897995eb308dcf0a327dcfc7a0cd82c6c24c7" Dec 02 18:50:33 crc kubenswrapper[4792]: I1202 18:50:33.003691 4792 scope.go:117] "RemoveContainer" containerID="ae1a3fe4c147307f3d4f9a23bc2ba1e6d1139337930ae2ace4c5fa9deec4f310" Dec 02 18:50:33 crc kubenswrapper[4792]: E1202 18:50:33.004281 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae1a3fe4c147307f3d4f9a23bc2ba1e6d1139337930ae2ace4c5fa9deec4f310\": container with ID starting with ae1a3fe4c147307f3d4f9a23bc2ba1e6d1139337930ae2ace4c5fa9deec4f310 not found: ID does not exist" containerID="ae1a3fe4c147307f3d4f9a23bc2ba1e6d1139337930ae2ace4c5fa9deec4f310" Dec 02 18:50:33 crc kubenswrapper[4792]: I1202 18:50:33.004325 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae1a3fe4c147307f3d4f9a23bc2ba1e6d1139337930ae2ace4c5fa9deec4f310"} err="failed to get container status \"ae1a3fe4c147307f3d4f9a23bc2ba1e6d1139337930ae2ace4c5fa9deec4f310\": rpc error: code = NotFound desc = could not find container \"ae1a3fe4c147307f3d4f9a23bc2ba1e6d1139337930ae2ace4c5fa9deec4f310\": container with ID starting with ae1a3fe4c147307f3d4f9a23bc2ba1e6d1139337930ae2ace4c5fa9deec4f310 not found: ID does not exist" Dec 02 18:50:33 crc kubenswrapper[4792]: I1202 18:50:33.004352 4792 scope.go:117] "RemoveContainer" containerID="6085390b6422ed40da3a0ae68746d0518e6bf8bcd2f8886c74a1f87363c1d702" Dec 02 18:50:33 crc kubenswrapper[4792]: E1202 18:50:33.004712 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6085390b6422ed40da3a0ae68746d0518e6bf8bcd2f8886c74a1f87363c1d702\": container with ID starting with 6085390b6422ed40da3a0ae68746d0518e6bf8bcd2f8886c74a1f87363c1d702 not found: ID does not exist" containerID="6085390b6422ed40da3a0ae68746d0518e6bf8bcd2f8886c74a1f87363c1d702" Dec 02 18:50:33 crc kubenswrapper[4792]: I1202 18:50:33.004747 4792 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6085390b6422ed40da3a0ae68746d0518e6bf8bcd2f8886c74a1f87363c1d702"} err="failed to get container status \"6085390b6422ed40da3a0ae68746d0518e6bf8bcd2f8886c74a1f87363c1d702\": rpc error: code = NotFound desc = could not find container \"6085390b6422ed40da3a0ae68746d0518e6bf8bcd2f8886c74a1f87363c1d702\": container with ID starting with 6085390b6422ed40da3a0ae68746d0518e6bf8bcd2f8886c74a1f87363c1d702 not found: ID does not exist" Dec 02 18:50:33 crc kubenswrapper[4792]: I1202 18:50:33.004776 4792 scope.go:117] "RemoveContainer" containerID="d4d7034ce21a0e174c711dc82be897995eb308dcf0a327dcfc7a0cd82c6c24c7" Dec 02 18:50:33 crc kubenswrapper[4792]: E1202 18:50:33.005172 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4d7034ce21a0e174c711dc82be897995eb308dcf0a327dcfc7a0cd82c6c24c7\": container with ID starting with d4d7034ce21a0e174c711dc82be897995eb308dcf0a327dcfc7a0cd82c6c24c7 not found: ID does not exist" containerID="d4d7034ce21a0e174c711dc82be897995eb308dcf0a327dcfc7a0cd82c6c24c7" Dec 02 18:50:33 crc kubenswrapper[4792]: I1202 18:50:33.005194 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4d7034ce21a0e174c711dc82be897995eb308dcf0a327dcfc7a0cd82c6c24c7"} err="failed to get container status \"d4d7034ce21a0e174c711dc82be897995eb308dcf0a327dcfc7a0cd82c6c24c7\": rpc error: code = NotFound desc = could not find container \"d4d7034ce21a0e174c711dc82be897995eb308dcf0a327dcfc7a0cd82c6c24c7\": container with ID starting with d4d7034ce21a0e174c711dc82be897995eb308dcf0a327dcfc7a0cd82c6c24c7 not found: ID does not exist" Dec 02 18:50:33 crc kubenswrapper[4792]: I1202 18:50:33.237035 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-7fd7bc68c8-q7j7s"] Dec 02 18:50:33 crc kubenswrapper[4792]: I1202 18:50:33.237866 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators-redhat/loki-operator-controller-manager-7fd7bc68c8-q7j7s" Dec 02 18:50:33 crc kubenswrapper[4792]: I1202 18:50:33.241107 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"loki-operator-manager-config" Dec 02 18:50:33 crc kubenswrapper[4792]: I1202 18:50:33.241135 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"openshift-service-ca.crt" Dec 02 18:50:33 crc kubenswrapper[4792]: I1202 18:50:33.243595 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-metrics" Dec 02 18:50:33 crc kubenswrapper[4792]: I1202 18:50:33.243760 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"kube-root-ca.crt" Dec 02 18:50:33 crc kubenswrapper[4792]: I1202 18:50:33.243797 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-controller-manager-dockercfg-mfj94" Dec 02 18:50:33 crc kubenswrapper[4792]: I1202 18:50:33.243947 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-controller-manager-service-cert" Dec 02 18:50:33 crc kubenswrapper[4792]: I1202 18:50:33.264121 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-7fd7bc68c8-q7j7s"] Dec 02 18:50:33 crc kubenswrapper[4792]: I1202 18:50:33.422052 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0ea8c233-1ba5-435d-a7e8-93d9d055fe7b-webhook-cert\") pod \"loki-operator-controller-manager-7fd7bc68c8-q7j7s\" (UID: \"0ea8c233-1ba5-435d-a7e8-93d9d055fe7b\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7fd7bc68c8-q7j7s" Dec 02 18:50:33 crc kubenswrapper[4792]: I1202 18:50:33.422189 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9qpnz\" (UniqueName: \"kubernetes.io/projected/0ea8c233-1ba5-435d-a7e8-93d9d055fe7b-kube-api-access-9qpnz\") pod \"loki-operator-controller-manager-7fd7bc68c8-q7j7s\" (UID: \"0ea8c233-1ba5-435d-a7e8-93d9d055fe7b\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7fd7bc68c8-q7j7s" Dec 02 18:50:33 crc kubenswrapper[4792]: I1202 18:50:33.422249 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/0ea8c233-1ba5-435d-a7e8-93d9d055fe7b-manager-config\") pod \"loki-operator-controller-manager-7fd7bc68c8-q7j7s\" (UID: \"0ea8c233-1ba5-435d-a7e8-93d9d055fe7b\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7fd7bc68c8-q7j7s" Dec 02 18:50:33 crc kubenswrapper[4792]: I1202 18:50:33.422268 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"loki-operator-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0ea8c233-1ba5-435d-a7e8-93d9d055fe7b-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-7fd7bc68c8-q7j7s\" (UID: \"0ea8c233-1ba5-435d-a7e8-93d9d055fe7b\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7fd7bc68c8-q7j7s" Dec 02 18:50:33 crc kubenswrapper[4792]: I1202 18:50:33.422343 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" 
(UniqueName: \"kubernetes.io/secret/0ea8c233-1ba5-435d-a7e8-93d9d055fe7b-apiservice-cert\") pod \"loki-operator-controller-manager-7fd7bc68c8-q7j7s\" (UID: \"0ea8c233-1ba5-435d-a7e8-93d9d055fe7b\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7fd7bc68c8-q7j7s" Dec 02 18:50:33 crc kubenswrapper[4792]: I1202 18:50:33.523694 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9qpnz\" (UniqueName: \"kubernetes.io/projected/0ea8c233-1ba5-435d-a7e8-93d9d055fe7b-kube-api-access-9qpnz\") pod \"loki-operator-controller-manager-7fd7bc68c8-q7j7s\" (UID: \"0ea8c233-1ba5-435d-a7e8-93d9d055fe7b\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7fd7bc68c8-q7j7s" Dec 02 18:50:33 crc kubenswrapper[4792]: I1202 18:50:33.523784 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/0ea8c233-1ba5-435d-a7e8-93d9d055fe7b-manager-config\") pod \"loki-operator-controller-manager-7fd7bc68c8-q7j7s\" (UID: \"0ea8c233-1ba5-435d-a7e8-93d9d055fe7b\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7fd7bc68c8-q7j7s" Dec 02 18:50:33 crc kubenswrapper[4792]: I1202 18:50:33.523813 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"loki-operator-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0ea8c233-1ba5-435d-a7e8-93d9d055fe7b-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-7fd7bc68c8-q7j7s\" (UID: \"0ea8c233-1ba5-435d-a7e8-93d9d055fe7b\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7fd7bc68c8-q7j7s" Dec 02 18:50:33 crc kubenswrapper[4792]: I1202 18:50:33.523864 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0ea8c233-1ba5-435d-a7e8-93d9d055fe7b-apiservice-cert\") pod \"loki-operator-controller-manager-7fd7bc68c8-q7j7s\" (UID: \"0ea8c233-1ba5-435d-a7e8-93d9d055fe7b\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7fd7bc68c8-q7j7s" Dec 02 18:50:33 crc kubenswrapper[4792]: I1202 18:50:33.523910 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0ea8c233-1ba5-435d-a7e8-93d9d055fe7b-webhook-cert\") pod \"loki-operator-controller-manager-7fd7bc68c8-q7j7s\" (UID: \"0ea8c233-1ba5-435d-a7e8-93d9d055fe7b\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7fd7bc68c8-q7j7s" Dec 02 18:50:33 crc kubenswrapper[4792]: I1202 18:50:33.524872 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/0ea8c233-1ba5-435d-a7e8-93d9d055fe7b-manager-config\") pod \"loki-operator-controller-manager-7fd7bc68c8-q7j7s\" (UID: \"0ea8c233-1ba5-435d-a7e8-93d9d055fe7b\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7fd7bc68c8-q7j7s" Dec 02 18:50:33 crc kubenswrapper[4792]: I1202 18:50:33.531166 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"loki-operator-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0ea8c233-1ba5-435d-a7e8-93d9d055fe7b-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-7fd7bc68c8-q7j7s\" (UID: \"0ea8c233-1ba5-435d-a7e8-93d9d055fe7b\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7fd7bc68c8-q7j7s" Dec 02 18:50:33 crc kubenswrapper[4792]: I1202 18:50:33.534063 4792 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0ea8c233-1ba5-435d-a7e8-93d9d055fe7b-apiservice-cert\") pod \"loki-operator-controller-manager-7fd7bc68c8-q7j7s\" (UID: \"0ea8c233-1ba5-435d-a7e8-93d9d055fe7b\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7fd7bc68c8-q7j7s" Dec 02 18:50:33 crc kubenswrapper[4792]: I1202 18:50:33.544389 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0ea8c233-1ba5-435d-a7e8-93d9d055fe7b-webhook-cert\") pod \"loki-operator-controller-manager-7fd7bc68c8-q7j7s\" (UID: \"0ea8c233-1ba5-435d-a7e8-93d9d055fe7b\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7fd7bc68c8-q7j7s" Dec 02 18:50:33 crc kubenswrapper[4792]: I1202 18:50:33.547139 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b941b5a-8a0d-47f8-a37a-9287576ea270" path="/var/lib/kubelet/pods/3b941b5a-8a0d-47f8-a37a-9287576ea270/volumes" Dec 02 18:50:33 crc kubenswrapper[4792]: I1202 18:50:33.560381 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9qpnz\" (UniqueName: \"kubernetes.io/projected/0ea8c233-1ba5-435d-a7e8-93d9d055fe7b-kube-api-access-9qpnz\") pod \"loki-operator-controller-manager-7fd7bc68c8-q7j7s\" (UID: \"0ea8c233-1ba5-435d-a7e8-93d9d055fe7b\") " pod="openshift-operators-redhat/loki-operator-controller-manager-7fd7bc68c8-q7j7s" Dec 02 18:50:33 crc kubenswrapper[4792]: I1202 18:50:33.851001 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators-redhat/loki-operator-controller-manager-7fd7bc68c8-q7j7s" Dec 02 18:50:33 crc kubenswrapper[4792]: I1202 18:50:33.949360 4792 generic.go:334] "Generic (PLEG): container finished" podID="19402c55-9b6f-4486-a3ce-e6971e5da081" containerID="b2da20a6a5622bbebcc743a3b9de004dd5bf495438f0da5c3ca53f3236236cee" exitCode=0 Dec 02 18:50:33 crc kubenswrapper[4792]: I1202 18:50:33.949439 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wvqmw" event={"ID":"19402c55-9b6f-4486-a3ce-e6971e5da081","Type":"ContainerDied","Data":"b2da20a6a5622bbebcc743a3b9de004dd5bf495438f0da5c3ca53f3236236cee"} Dec 02 18:50:34 crc kubenswrapper[4792]: I1202 18:50:34.126269 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-7fd7bc68c8-q7j7s"] Dec 02 18:50:34 crc kubenswrapper[4792]: I1202 18:50:34.960905 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-7fd7bc68c8-q7j7s" event={"ID":"0ea8c233-1ba5-435d-a7e8-93d9d055fe7b","Type":"ContainerStarted","Data":"b57f82a389f1bb4d209fce17f844f2590492bae5f880e34904786a6c31d3d00f"} Dec 02 18:50:35 crc kubenswrapper[4792]: I1202 18:50:35.631011 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2cck7"] Dec 02 18:50:35 crc kubenswrapper[4792]: I1202 18:50:35.631335 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-2cck7" podUID="a2971010-22cd-41d1-ad4c-663fe1f1f31b" containerName="registry-server" containerID="cri-o://78b1117f06cac79e812bbd82577ab219a25d57c80b6358f578df412c8113a075" gracePeriod=2 Dec 02 18:50:35 crc kubenswrapper[4792]: I1202 18:50:35.972576 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wvqmw" event={"ID":"19402c55-9b6f-4486-a3ce-e6971e5da081","Type":"ContainerDied","Data":"eeea2fdba8fff1f26985515921fbe0707ea110aa7954897f0cc591a8ec070604"} Dec 02 18:50:35 crc kubenswrapper[4792]: I1202 18:50:35.972962 4792 generic.go:334] "Generic (PLEG): container finished" podID="19402c55-9b6f-4486-a3ce-e6971e5da081" containerID="eeea2fdba8fff1f26985515921fbe0707ea110aa7954897f0cc591a8ec070604" exitCode=0 Dec 02 18:50:35 crc kubenswrapper[4792]: I1202 18:50:35.975953 4792 generic.go:334] "Generic (PLEG): container finished" podID="a2971010-22cd-41d1-ad4c-663fe1f1f31b" containerID="78b1117f06cac79e812bbd82577ab219a25d57c80b6358f578df412c8113a075" exitCode=0 Dec 02 18:50:35 crc kubenswrapper[4792]: I1202 18:50:35.976008 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2cck7" event={"ID":"a2971010-22cd-41d1-ad4c-663fe1f1f31b","Type":"ContainerDied","Data":"78b1117f06cac79e812bbd82577ab219a25d57c80b6358f578df412c8113a075"} Dec 02 18:50:36 crc kubenswrapper[4792]: I1202 18:50:36.103664 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2cck7" Dec 02 18:50:36 crc kubenswrapper[4792]: I1202 18:50:36.158594 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkttn\" (UniqueName: \"kubernetes.io/projected/a2971010-22cd-41d1-ad4c-663fe1f1f31b-kube-api-access-jkttn\") pod \"a2971010-22cd-41d1-ad4c-663fe1f1f31b\" (UID: \"a2971010-22cd-41d1-ad4c-663fe1f1f31b\") " Dec 02 18:50:36 crc kubenswrapper[4792]: I1202 18:50:36.158681 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2971010-22cd-41d1-ad4c-663fe1f1f31b-utilities\") pod \"a2971010-22cd-41d1-ad4c-663fe1f1f31b\" (UID: \"a2971010-22cd-41d1-ad4c-663fe1f1f31b\") " Dec 02 18:50:36 crc kubenswrapper[4792]: I1202 18:50:36.158702 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2971010-22cd-41d1-ad4c-663fe1f1f31b-catalog-content\") pod \"a2971010-22cd-41d1-ad4c-663fe1f1f31b\" (UID: \"a2971010-22cd-41d1-ad4c-663fe1f1f31b\") " Dec 02 18:50:36 crc kubenswrapper[4792]: I1202 18:50:36.160394 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a2971010-22cd-41d1-ad4c-663fe1f1f31b-utilities" (OuterVolumeSpecName: "utilities") pod "a2971010-22cd-41d1-ad4c-663fe1f1f31b" (UID: "a2971010-22cd-41d1-ad4c-663fe1f1f31b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:50:36 crc kubenswrapper[4792]: I1202 18:50:36.175508 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2971010-22cd-41d1-ad4c-663fe1f1f31b-kube-api-access-jkttn" (OuterVolumeSpecName: "kube-api-access-jkttn") pod "a2971010-22cd-41d1-ad4c-663fe1f1f31b" (UID: "a2971010-22cd-41d1-ad4c-663fe1f1f31b"). InnerVolumeSpecName "kube-api-access-jkttn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:50:36 crc kubenswrapper[4792]: I1202 18:50:36.261579 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2971010-22cd-41d1-ad4c-663fe1f1f31b-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 18:50:36 crc kubenswrapper[4792]: I1202 18:50:36.261615 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkttn\" (UniqueName: \"kubernetes.io/projected/a2971010-22cd-41d1-ad4c-663fe1f1f31b-kube-api-access-jkttn\") on node \"crc\" DevicePath \"\"" Dec 02 18:50:36 crc kubenswrapper[4792]: I1202 18:50:36.270000 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a2971010-22cd-41d1-ad4c-663fe1f1f31b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a2971010-22cd-41d1-ad4c-663fe1f1f31b" (UID: "a2971010-22cd-41d1-ad4c-663fe1f1f31b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:50:36 crc kubenswrapper[4792]: I1202 18:50:36.363299 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2971010-22cd-41d1-ad4c-663fe1f1f31b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 18:50:36 crc kubenswrapper[4792]: I1202 18:50:36.996630 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2cck7" event={"ID":"a2971010-22cd-41d1-ad4c-663fe1f1f31b","Type":"ContainerDied","Data":"d9879b5491f71c4dfd7881811fecbccb2b02eb6dc4d03d142ebb84cde90ab3f0"} Dec 02 18:50:36 crc kubenswrapper[4792]: I1202 18:50:36.996876 4792 scope.go:117] "RemoveContainer" containerID="78b1117f06cac79e812bbd82577ab219a25d57c80b6358f578df412c8113a075" Dec 02 18:50:36 crc kubenswrapper[4792]: I1202 18:50:36.996902 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2cck7" Dec 02 18:50:37 crc kubenswrapper[4792]: I1202 18:50:36.999717 4792 generic.go:334] "Generic (PLEG): container finished" podID="19402c55-9b6f-4486-a3ce-e6971e5da081" containerID="9219f45b3188011cef2d91ae6da23ff262bd7f0800ba66171e37ed41d50961ca" exitCode=0 Dec 02 18:50:37 crc kubenswrapper[4792]: I1202 18:50:36.999761 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wvqmw" event={"ID":"19402c55-9b6f-4486-a3ce-e6971e5da081","Type":"ContainerDied","Data":"9219f45b3188011cef2d91ae6da23ff262bd7f0800ba66171e37ed41d50961ca"} Dec 02 18:50:37 crc kubenswrapper[4792]: I1202 18:50:37.040619 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2cck7"] Dec 02 18:50:37 crc kubenswrapper[4792]: I1202 18:50:37.048853 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-2cck7"] Dec 02 18:50:37 crc kubenswrapper[4792]: I1202 18:50:37.558213 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2971010-22cd-41d1-ad4c-663fe1f1f31b" path="/var/lib/kubelet/pods/a2971010-22cd-41d1-ad4c-663fe1f1f31b/volumes" Dec 02 18:50:38 crc kubenswrapper[4792]: I1202 18:50:38.284497 4792 scope.go:117] "RemoveContainer" containerID="60e6ca1d6813a5623201c6c321fe1611d87ca6d00334fad3712ed5a1d358864c" Dec 02 18:50:38 crc kubenswrapper[4792]: I1202 18:50:38.332368 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wvqmw" Dec 02 18:50:38 crc kubenswrapper[4792]: I1202 18:50:38.345592 4792 scope.go:117] "RemoveContainer" containerID="ef6419ded340f528047a7c1c341e780dd094b828bf736d6d403589ce838bcbc2" Dec 02 18:50:38 crc kubenswrapper[4792]: I1202 18:50:38.415716 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/19402c55-9b6f-4486-a3ce-e6971e5da081-bundle\") pod \"19402c55-9b6f-4486-a3ce-e6971e5da081\" (UID: \"19402c55-9b6f-4486-a3ce-e6971e5da081\") " Dec 02 18:50:38 crc kubenswrapper[4792]: I1202 18:50:38.415797 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/19402c55-9b6f-4486-a3ce-e6971e5da081-util\") pod \"19402c55-9b6f-4486-a3ce-e6971e5da081\" (UID: \"19402c55-9b6f-4486-a3ce-e6971e5da081\") " Dec 02 18:50:38 crc kubenswrapper[4792]: I1202 18:50:38.415822 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6w6tr\" (UniqueName: \"kubernetes.io/projected/19402c55-9b6f-4486-a3ce-e6971e5da081-kube-api-access-6w6tr\") pod \"19402c55-9b6f-4486-a3ce-e6971e5da081\" (UID: \"19402c55-9b6f-4486-a3ce-e6971e5da081\") " Dec 02 18:50:38 crc kubenswrapper[4792]: I1202 18:50:38.418493 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/19402c55-9b6f-4486-a3ce-e6971e5da081-bundle" (OuterVolumeSpecName: "bundle") pod "19402c55-9b6f-4486-a3ce-e6971e5da081" (UID: "19402c55-9b6f-4486-a3ce-e6971e5da081"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:50:38 crc kubenswrapper[4792]: I1202 18:50:38.419833 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19402c55-9b6f-4486-a3ce-e6971e5da081-kube-api-access-6w6tr" (OuterVolumeSpecName: "kube-api-access-6w6tr") pod "19402c55-9b6f-4486-a3ce-e6971e5da081" (UID: "19402c55-9b6f-4486-a3ce-e6971e5da081"). InnerVolumeSpecName "kube-api-access-6w6tr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:50:38 crc kubenswrapper[4792]: I1202 18:50:38.449052 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/19402c55-9b6f-4486-a3ce-e6971e5da081-util" (OuterVolumeSpecName: "util") pod "19402c55-9b6f-4486-a3ce-e6971e5da081" (UID: "19402c55-9b6f-4486-a3ce-e6971e5da081"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:50:38 crc kubenswrapper[4792]: I1202 18:50:38.518089 4792 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/19402c55-9b6f-4486-a3ce-e6971e5da081-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 18:50:38 crc kubenswrapper[4792]: I1202 18:50:38.518142 4792 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/19402c55-9b6f-4486-a3ce-e6971e5da081-util\") on node \"crc\" DevicePath \"\"" Dec 02 18:50:38 crc kubenswrapper[4792]: I1202 18:50:38.518158 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6w6tr\" (UniqueName: \"kubernetes.io/projected/19402c55-9b6f-4486-a3ce-e6971e5da081-kube-api-access-6w6tr\") on node \"crc\" DevicePath \"\"" Dec 02 18:50:39 crc kubenswrapper[4792]: I1202 18:50:39.024279 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-7fd7bc68c8-q7j7s" event={"ID":"0ea8c233-1ba5-435d-a7e8-93d9d055fe7b","Type":"ContainerStarted","Data":"7a26a88d7a04e473af7e793756a81b9f4753b13e2b650361a08e802b6cc0185b"} Dec 02 18:50:39 crc kubenswrapper[4792]: I1202 18:50:39.028431 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wvqmw" event={"ID":"19402c55-9b6f-4486-a3ce-e6971e5da081","Type":"ContainerDied","Data":"841521215c9f3abd7130472b65db578bb59589b44c1672c93e66c25f8003649d"} Dec 02 18:50:39 crc kubenswrapper[4792]: I1202 18:50:39.028463 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="841521215c9f3abd7130472b65db578bb59589b44c1672c93e66c25f8003649d" Dec 02 18:50:39 crc kubenswrapper[4792]: I1202 18:50:39.028582 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wvqmw" Dec 02 18:50:45 crc kubenswrapper[4792]: I1202 18:50:45.070828 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-7fd7bc68c8-q7j7s" event={"ID":"0ea8c233-1ba5-435d-a7e8-93d9d055fe7b","Type":"ContainerStarted","Data":"547d86b9539dae48fd0a68e4276258ad9b1f388a65b4b92f995a9cf0b43c0c80"} Dec 02 18:50:45 crc kubenswrapper[4792]: I1202 18:50:45.072017 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators-redhat/loki-operator-controller-manager-7fd7bc68c8-q7j7s" Dec 02 18:50:45 crc kubenswrapper[4792]: I1202 18:50:45.073355 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators-redhat/loki-operator-controller-manager-7fd7bc68c8-q7j7s" Dec 02 18:50:45 crc kubenswrapper[4792]: I1202 18:50:45.104923 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators-redhat/loki-operator-controller-manager-7fd7bc68c8-q7j7s" podStartSLOduration=1.686214393 podStartE2EDuration="12.104897883s" podCreationTimestamp="2025-12-02 18:50:33 +0000 UTC" firstStartedPulling="2025-12-02 18:50:34.138075171 +0000 UTC m=+864.910967499" lastFinishedPulling="2025-12-02 18:50:44.556758661 +0000 UTC m=+875.329650989" observedRunningTime="2025-12-02 18:50:45.101287661 +0000 UTC m=+875.874179999" watchObservedRunningTime="2025-12-02 18:50:45.104897883 +0000 UTC m=+875.877790251" Dec 02 18:51:09 crc kubenswrapper[4792]: I1202 18:51:09.952380 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f5p9hp"] Dec 02 18:51:09 crc kubenswrapper[4792]: E1202 18:51:09.954182 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2971010-22cd-41d1-ad4c-663fe1f1f31b" containerName="registry-server" Dec 02 18:51:09 crc kubenswrapper[4792]: I1202 18:51:09.954266 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2971010-22cd-41d1-ad4c-663fe1f1f31b" containerName="registry-server" Dec 02 18:51:09 crc kubenswrapper[4792]: E1202 18:51:09.954326 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2971010-22cd-41d1-ad4c-663fe1f1f31b" containerName="extract-content" Dec 02 18:51:09 crc kubenswrapper[4792]: I1202 18:51:09.954378 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2971010-22cd-41d1-ad4c-663fe1f1f31b" containerName="extract-content" Dec 02 18:51:09 crc kubenswrapper[4792]: E1202 18:51:09.954434 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19402c55-9b6f-4486-a3ce-e6971e5da081" containerName="extract" Dec 02 18:51:09 crc kubenswrapper[4792]: I1202 18:51:09.954485 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="19402c55-9b6f-4486-a3ce-e6971e5da081" containerName="extract" Dec 02 18:51:09 crc kubenswrapper[4792]: E1202 18:51:09.954566 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2971010-22cd-41d1-ad4c-663fe1f1f31b" containerName="extract-utilities" Dec 02 18:51:09 crc kubenswrapper[4792]: I1202 18:51:09.954617 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2971010-22cd-41d1-ad4c-663fe1f1f31b" containerName="extract-utilities" Dec 02 18:51:09 crc kubenswrapper[4792]: E1202 18:51:09.954680 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19402c55-9b6f-4486-a3ce-e6971e5da081" containerName="util" Dec 02 
18:51:09 crc kubenswrapper[4792]: I1202 18:51:09.954733 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="19402c55-9b6f-4486-a3ce-e6971e5da081" containerName="util" Dec 02 18:51:09 crc kubenswrapper[4792]: E1202 18:51:09.954784 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19402c55-9b6f-4486-a3ce-e6971e5da081" containerName="pull" Dec 02 18:51:09 crc kubenswrapper[4792]: I1202 18:51:09.954859 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="19402c55-9b6f-4486-a3ce-e6971e5da081" containerName="pull" Dec 02 18:51:09 crc kubenswrapper[4792]: I1202 18:51:09.955014 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="19402c55-9b6f-4486-a3ce-e6971e5da081" containerName="extract" Dec 02 18:51:09 crc kubenswrapper[4792]: I1202 18:51:09.955074 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2971010-22cd-41d1-ad4c-663fe1f1f31b" containerName="registry-server" Dec 02 18:51:09 crc kubenswrapper[4792]: I1202 18:51:09.955925 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f5p9hp" Dec 02 18:51:09 crc kubenswrapper[4792]: I1202 18:51:09.959903 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 02 18:51:09 crc kubenswrapper[4792]: I1202 18:51:09.973237 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f5p9hp"] Dec 02 18:51:10 crc kubenswrapper[4792]: I1202 18:51:10.056689 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/63ea8a4a-8b04-44f4-a0a8-4767d02973bb-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f5p9hp\" (UID: \"63ea8a4a-8b04-44f4-a0a8-4767d02973bb\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f5p9hp" Dec 02 18:51:10 crc kubenswrapper[4792]: I1202 18:51:10.056812 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6t4m\" (UniqueName: \"kubernetes.io/projected/63ea8a4a-8b04-44f4-a0a8-4767d02973bb-kube-api-access-c6t4m\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f5p9hp\" (UID: \"63ea8a4a-8b04-44f4-a0a8-4767d02973bb\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f5p9hp" Dec 02 18:51:10 crc kubenswrapper[4792]: I1202 18:51:10.056918 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/63ea8a4a-8b04-44f4-a0a8-4767d02973bb-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f5p9hp\" (UID: \"63ea8a4a-8b04-44f4-a0a8-4767d02973bb\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f5p9hp" Dec 02 18:51:10 crc kubenswrapper[4792]: I1202 18:51:10.158428 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/63ea8a4a-8b04-44f4-a0a8-4767d02973bb-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f5p9hp\" (UID: \"63ea8a4a-8b04-44f4-a0a8-4767d02973bb\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f5p9hp" Dec 02 18:51:10 crc kubenswrapper[4792]: I1202 18:51:10.158514 4792 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6t4m\" (UniqueName: \"kubernetes.io/projected/63ea8a4a-8b04-44f4-a0a8-4767d02973bb-kube-api-access-c6t4m\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f5p9hp\" (UID: \"63ea8a4a-8b04-44f4-a0a8-4767d02973bb\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f5p9hp" Dec 02 18:51:10 crc kubenswrapper[4792]: I1202 18:51:10.158593 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/63ea8a4a-8b04-44f4-a0a8-4767d02973bb-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f5p9hp\" (UID: \"63ea8a4a-8b04-44f4-a0a8-4767d02973bb\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f5p9hp" Dec 02 18:51:10 crc kubenswrapper[4792]: I1202 18:51:10.159383 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/63ea8a4a-8b04-44f4-a0a8-4767d02973bb-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f5p9hp\" (UID: \"63ea8a4a-8b04-44f4-a0a8-4767d02973bb\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f5p9hp" Dec 02 18:51:10 crc kubenswrapper[4792]: I1202 18:51:10.159962 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/63ea8a4a-8b04-44f4-a0a8-4767d02973bb-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f5p9hp\" (UID: \"63ea8a4a-8b04-44f4-a0a8-4767d02973bb\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f5p9hp" Dec 02 18:51:10 crc kubenswrapper[4792]: I1202 18:51:10.187330 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6t4m\" (UniqueName: \"kubernetes.io/projected/63ea8a4a-8b04-44f4-a0a8-4767d02973bb-kube-api-access-c6t4m\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f5p9hp\" (UID: \"63ea8a4a-8b04-44f4-a0a8-4767d02973bb\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f5p9hp" Dec 02 18:51:10 crc kubenswrapper[4792]: I1202 18:51:10.273703 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f5p9hp" Dec 02 18:51:10 crc kubenswrapper[4792]: I1202 18:51:10.802759 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f5p9hp"] Dec 02 18:51:10 crc kubenswrapper[4792]: W1202 18:51:10.810750 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod63ea8a4a_8b04_44f4_a0a8_4767d02973bb.slice/crio-3a902a808fd15297819bf67ad1ab84c434f8514facfc37cdabbbdfa679dcd5ce WatchSource:0}: Error finding container 3a902a808fd15297819bf67ad1ab84c434f8514facfc37cdabbbdfa679dcd5ce: Status 404 returned error can't find the container with id 3a902a808fd15297819bf67ad1ab84c434f8514facfc37cdabbbdfa679dcd5ce Dec 02 18:51:11 crc kubenswrapper[4792]: I1202 18:51:11.277927 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f5p9hp" event={"ID":"63ea8a4a-8b04-44f4-a0a8-4767d02973bb","Type":"ContainerStarted","Data":"3a902a808fd15297819bf67ad1ab84c434f8514facfc37cdabbbdfa679dcd5ce"} Dec 02 18:51:13 crc kubenswrapper[4792]: I1202 18:51:13.291600 4792 generic.go:334] "Generic (PLEG): container finished" podID="63ea8a4a-8b04-44f4-a0a8-4767d02973bb" containerID="29783069c88b8c7271fa207de4d57a84e80f286ae7b64712b05f5ebf8188a477" exitCode=0 Dec 02 18:51:13 crc kubenswrapper[4792]: I1202 18:51:13.291719 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f5p9hp" event={"ID":"63ea8a4a-8b04-44f4-a0a8-4767d02973bb","Type":"ContainerDied","Data":"29783069c88b8c7271fa207de4d57a84e80f286ae7b64712b05f5ebf8188a477"} Dec 02 18:51:15 crc kubenswrapper[4792]: I1202 18:51:15.312220 4792 generic.go:334] "Generic (PLEG): container finished" podID="63ea8a4a-8b04-44f4-a0a8-4767d02973bb" containerID="e1d655402e1f312758163d96aded341b4aa2c61af63898a2256404322ed10bf5" exitCode=0 Dec 02 18:51:15 crc kubenswrapper[4792]: I1202 18:51:15.312330 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f5p9hp" event={"ID":"63ea8a4a-8b04-44f4-a0a8-4767d02973bb","Type":"ContainerDied","Data":"e1d655402e1f312758163d96aded341b4aa2c61af63898a2256404322ed10bf5"} Dec 02 18:51:16 crc kubenswrapper[4792]: I1202 18:51:16.322807 4792 generic.go:334] "Generic (PLEG): container finished" podID="63ea8a4a-8b04-44f4-a0a8-4767d02973bb" containerID="4574c3ac45e5a0fb84b6da17bbac614e4de568c16d48a924ebd2160e536aa418" exitCode=0 Dec 02 18:51:16 crc kubenswrapper[4792]: I1202 18:51:16.322919 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f5p9hp" event={"ID":"63ea8a4a-8b04-44f4-a0a8-4767d02973bb","Type":"ContainerDied","Data":"4574c3ac45e5a0fb84b6da17bbac614e4de568c16d48a924ebd2160e536aa418"} Dec 02 18:51:17 crc kubenswrapper[4792]: I1202 18:51:17.667911 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f5p9hp" Dec 02 18:51:17 crc kubenswrapper[4792]: I1202 18:51:17.852906 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c6t4m\" (UniqueName: \"kubernetes.io/projected/63ea8a4a-8b04-44f4-a0a8-4767d02973bb-kube-api-access-c6t4m\") pod \"63ea8a4a-8b04-44f4-a0a8-4767d02973bb\" (UID: \"63ea8a4a-8b04-44f4-a0a8-4767d02973bb\") " Dec 02 18:51:17 crc kubenswrapper[4792]: I1202 18:51:17.852996 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/63ea8a4a-8b04-44f4-a0a8-4767d02973bb-util\") pod \"63ea8a4a-8b04-44f4-a0a8-4767d02973bb\" (UID: \"63ea8a4a-8b04-44f4-a0a8-4767d02973bb\") " Dec 02 18:51:17 crc kubenswrapper[4792]: I1202 18:51:17.853084 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/63ea8a4a-8b04-44f4-a0a8-4767d02973bb-bundle\") pod \"63ea8a4a-8b04-44f4-a0a8-4767d02973bb\" (UID: \"63ea8a4a-8b04-44f4-a0a8-4767d02973bb\") " Dec 02 18:51:17 crc kubenswrapper[4792]: I1202 18:51:17.853712 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/63ea8a4a-8b04-44f4-a0a8-4767d02973bb-bundle" (OuterVolumeSpecName: "bundle") pod "63ea8a4a-8b04-44f4-a0a8-4767d02973bb" (UID: "63ea8a4a-8b04-44f4-a0a8-4767d02973bb"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:51:17 crc kubenswrapper[4792]: I1202 18:51:17.858356 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63ea8a4a-8b04-44f4-a0a8-4767d02973bb-kube-api-access-c6t4m" (OuterVolumeSpecName: "kube-api-access-c6t4m") pod "63ea8a4a-8b04-44f4-a0a8-4767d02973bb" (UID: "63ea8a4a-8b04-44f4-a0a8-4767d02973bb"). InnerVolumeSpecName "kube-api-access-c6t4m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:51:17 crc kubenswrapper[4792]: I1202 18:51:17.954561 4792 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/63ea8a4a-8b04-44f4-a0a8-4767d02973bb-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 18:51:17 crc kubenswrapper[4792]: I1202 18:51:17.954600 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c6t4m\" (UniqueName: \"kubernetes.io/projected/63ea8a4a-8b04-44f4-a0a8-4767d02973bb-kube-api-access-c6t4m\") on node \"crc\" DevicePath \"\"" Dec 02 18:51:18 crc kubenswrapper[4792]: I1202 18:51:18.085001 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/63ea8a4a-8b04-44f4-a0a8-4767d02973bb-util" (OuterVolumeSpecName: "util") pod "63ea8a4a-8b04-44f4-a0a8-4767d02973bb" (UID: "63ea8a4a-8b04-44f4-a0a8-4767d02973bb"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:51:18 crc kubenswrapper[4792]: I1202 18:51:18.157284 4792 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/63ea8a4a-8b04-44f4-a0a8-4767d02973bb-util\") on node \"crc\" DevicePath \"\"" Dec 02 18:51:18 crc kubenswrapper[4792]: I1202 18:51:18.342373 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f5p9hp" event={"ID":"63ea8a4a-8b04-44f4-a0a8-4767d02973bb","Type":"ContainerDied","Data":"3a902a808fd15297819bf67ad1ab84c434f8514facfc37cdabbbdfa679dcd5ce"} Dec 02 18:51:18 crc kubenswrapper[4792]: I1202 18:51:18.342416 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3a902a808fd15297819bf67ad1ab84c434f8514facfc37cdabbbdfa679dcd5ce" Dec 02 18:51:18 crc kubenswrapper[4792]: I1202 18:51:18.342457 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f5p9hp" Dec 02 18:51:22 crc kubenswrapper[4792]: I1202 18:51:22.152429 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-npmdr"] Dec 02 18:51:22 crc kubenswrapper[4792]: E1202 18:51:22.152692 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63ea8a4a-8b04-44f4-a0a8-4767d02973bb" containerName="util" Dec 02 18:51:22 crc kubenswrapper[4792]: I1202 18:51:22.152707 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="63ea8a4a-8b04-44f4-a0a8-4767d02973bb" containerName="util" Dec 02 18:51:22 crc kubenswrapper[4792]: E1202 18:51:22.152717 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63ea8a4a-8b04-44f4-a0a8-4767d02973bb" containerName="pull" Dec 02 18:51:22 crc kubenswrapper[4792]: I1202 18:51:22.152725 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="63ea8a4a-8b04-44f4-a0a8-4767d02973bb" containerName="pull" Dec 02 18:51:22 crc kubenswrapper[4792]: E1202 18:51:22.152740 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63ea8a4a-8b04-44f4-a0a8-4767d02973bb" containerName="extract" Dec 02 18:51:22 crc kubenswrapper[4792]: I1202 18:51:22.152749 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="63ea8a4a-8b04-44f4-a0a8-4767d02973bb" containerName="extract" Dec 02 18:51:22 crc kubenswrapper[4792]: I1202 18:51:22.152888 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="63ea8a4a-8b04-44f4-a0a8-4767d02973bb" containerName="extract" Dec 02 18:51:22 crc kubenswrapper[4792]: I1202 18:51:22.153337 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-npmdr" Dec 02 18:51:22 crc kubenswrapper[4792]: I1202 18:51:22.155191 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-btzpb" Dec 02 18:51:22 crc kubenswrapper[4792]: I1202 18:51:22.155534 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Dec 02 18:51:22 crc kubenswrapper[4792]: I1202 18:51:22.156128 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Dec 02 18:51:22 crc kubenswrapper[4792]: I1202 18:51:22.170788 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-npmdr"] Dec 02 18:51:22 crc kubenswrapper[4792]: I1202 18:51:22.314485 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kpd2k\" (UniqueName: \"kubernetes.io/projected/9874a853-6f18-456b-9634-c3b923e8113c-kube-api-access-kpd2k\") pod \"nmstate-operator-5b5b58f5c8-npmdr\" (UID: \"9874a853-6f18-456b-9634-c3b923e8113c\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-npmdr" Dec 02 18:51:22 crc kubenswrapper[4792]: I1202 18:51:22.416701 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kpd2k\" (UniqueName: \"kubernetes.io/projected/9874a853-6f18-456b-9634-c3b923e8113c-kube-api-access-kpd2k\") pod \"nmstate-operator-5b5b58f5c8-npmdr\" (UID: \"9874a853-6f18-456b-9634-c3b923e8113c\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-npmdr" Dec 02 18:51:22 crc kubenswrapper[4792]: I1202 18:51:22.451559 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kpd2k\" (UniqueName: \"kubernetes.io/projected/9874a853-6f18-456b-9634-c3b923e8113c-kube-api-access-kpd2k\") pod \"nmstate-operator-5b5b58f5c8-npmdr\" (UID: \"9874a853-6f18-456b-9634-c3b923e8113c\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-npmdr" Dec 02 18:51:22 crc kubenswrapper[4792]: I1202 18:51:22.473175 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-npmdr" Dec 02 18:51:22 crc kubenswrapper[4792]: I1202 18:51:22.712812 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-npmdr"] Dec 02 18:51:23 crc kubenswrapper[4792]: I1202 18:51:23.374885 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-npmdr" event={"ID":"9874a853-6f18-456b-9634-c3b923e8113c","Type":"ContainerStarted","Data":"eb65da42d606d1ed5331d04e3d252ede1ee37f985447be999f1ee0f12444a6f1"} Dec 02 18:51:25 crc kubenswrapper[4792]: I1202 18:51:25.387233 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-npmdr" event={"ID":"9874a853-6f18-456b-9634-c3b923e8113c","Type":"ContainerStarted","Data":"36ca88a4f82ac740765a4f1212aebab6fb8deb349c2038dae4a27fa500c02eec"} Dec 02 18:51:25 crc kubenswrapper[4792]: I1202 18:51:25.410802 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-npmdr" podStartSLOduration=1.292143396 podStartE2EDuration="3.41077811s" podCreationTimestamp="2025-12-02 18:51:22 +0000 UTC" firstStartedPulling="2025-12-02 18:51:22.72909706 +0000 UTC m=+913.501989408" lastFinishedPulling="2025-12-02 18:51:24.847731794 +0000 UTC m=+915.620624122" observedRunningTime="2025-12-02 18:51:25.409219369 +0000 UTC m=+916.182111697" watchObservedRunningTime="2025-12-02 18:51:25.41077811 +0000 UTC m=+916.183670448" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.265968 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-svbm7"] Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.271067 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-svbm7" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.317000 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-p7mvv" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.320832 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-svbm7"] Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.328445 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-w46s7"] Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.329280 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-w46s7" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.333597 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.336578 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-w46s7"] Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.342590 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-cvm76"] Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.343463 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-cvm76" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.434139 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gs72k"] Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.435002 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gs72k" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.436454 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-7vlms" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.437321 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.439206 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.439366 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nw8hk\" (UniqueName: \"kubernetes.io/projected/0ef33e67-bcbf-4c68-87d4-5cc1db2e73d0-kube-api-access-nw8hk\") pod \"nmstate-webhook-5f6d4c5ccb-w46s7\" (UID: \"0ef33e67-bcbf-4c68-87d4-5cc1db2e73d0\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-w46s7" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.439411 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/59543a69-801f-485f-b683-b9328aab396e-dbus-socket\") pod \"nmstate-handler-cvm76\" (UID: \"59543a69-801f-485f-b683-b9328aab396e\") " pod="openshift-nmstate/nmstate-handler-cvm76" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.439439 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/0ef33e67-bcbf-4c68-87d4-5cc1db2e73d0-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-w46s7\" (UID: \"0ef33e67-bcbf-4c68-87d4-5cc1db2e73d0\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-w46s7" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.439561 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-794f7\" (UniqueName: \"kubernetes.io/projected/59543a69-801f-485f-b683-b9328aab396e-kube-api-access-794f7\") pod \"nmstate-handler-cvm76\" (UID: \"59543a69-801f-485f-b683-b9328aab396e\") " pod="openshift-nmstate/nmstate-handler-cvm76" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.439590 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/59543a69-801f-485f-b683-b9328aab396e-ovs-socket\") pod \"nmstate-handler-cvm76\" (UID: \"59543a69-801f-485f-b683-b9328aab396e\") " pod="openshift-nmstate/nmstate-handler-cvm76" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.439620 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/59543a69-801f-485f-b683-b9328aab396e-nmstate-lock\") pod \"nmstate-handler-cvm76\" (UID: \"59543a69-801f-485f-b683-b9328aab396e\") " pod="openshift-nmstate/nmstate-handler-cvm76" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.439664 4792 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4mcjq\" (UniqueName: \"kubernetes.io/projected/34e1a790-24ea-4564-8453-f525053ec5fa-kube-api-access-4mcjq\") pod \"nmstate-metrics-7f946cbc9-svbm7\" (UID: \"34e1a790-24ea-4564-8453-f525053ec5fa\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-svbm7" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.447805 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gs72k"] Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.540829 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/59543a69-801f-485f-b683-b9328aab396e-dbus-socket\") pod \"nmstate-handler-cvm76\" (UID: \"59543a69-801f-485f-b683-b9328aab396e\") " pod="openshift-nmstate/nmstate-handler-cvm76" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.540892 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/0ef33e67-bcbf-4c68-87d4-5cc1db2e73d0-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-w46s7\" (UID: \"0ef33e67-bcbf-4c68-87d4-5cc1db2e73d0\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-w46s7" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.540927 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/1609afe3-03e9-4bab-8ea5-444ffe47a8a0-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-gs72k\" (UID: \"1609afe3-03e9-4bab-8ea5-444ffe47a8a0\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gs72k" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.540967 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-794f7\" (UniqueName: \"kubernetes.io/projected/59543a69-801f-485f-b683-b9328aab396e-kube-api-access-794f7\") pod \"nmstate-handler-cvm76\" (UID: \"59543a69-801f-485f-b683-b9328aab396e\") " pod="openshift-nmstate/nmstate-handler-cvm76" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.540996 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/59543a69-801f-485f-b683-b9328aab396e-ovs-socket\") pod \"nmstate-handler-cvm76\" (UID: \"59543a69-801f-485f-b683-b9328aab396e\") " pod="openshift-nmstate/nmstate-handler-cvm76" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.541040 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/59543a69-801f-485f-b683-b9328aab396e-nmstate-lock\") pod \"nmstate-handler-cvm76\" (UID: \"59543a69-801f-485f-b683-b9328aab396e\") " pod="openshift-nmstate/nmstate-handler-cvm76" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.541073 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m7dxp\" (UniqueName: \"kubernetes.io/projected/1609afe3-03e9-4bab-8ea5-444ffe47a8a0-kube-api-access-m7dxp\") pod \"nmstate-console-plugin-7fbb5f6569-gs72k\" (UID: \"1609afe3-03e9-4bab-8ea5-444ffe47a8a0\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gs72k" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.541105 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/1609afe3-03e9-4bab-8ea5-444ffe47a8a0-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-gs72k\" (UID: \"1609afe3-03e9-4bab-8ea5-444ffe47a8a0\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gs72k" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.541110 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/59543a69-801f-485f-b683-b9328aab396e-dbus-socket\") pod \"nmstate-handler-cvm76\" (UID: \"59543a69-801f-485f-b683-b9328aab396e\") " pod="openshift-nmstate/nmstate-handler-cvm76" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.541139 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4mcjq\" (UniqueName: \"kubernetes.io/projected/34e1a790-24ea-4564-8453-f525053ec5fa-kube-api-access-4mcjq\") pod \"nmstate-metrics-7f946cbc9-svbm7\" (UID: \"34e1a790-24ea-4564-8453-f525053ec5fa\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-svbm7" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.541166 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/59543a69-801f-485f-b683-b9328aab396e-ovs-socket\") pod \"nmstate-handler-cvm76\" (UID: \"59543a69-801f-485f-b683-b9328aab396e\") " pod="openshift-nmstate/nmstate-handler-cvm76" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.541175 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nw8hk\" (UniqueName: \"kubernetes.io/projected/0ef33e67-bcbf-4c68-87d4-5cc1db2e73d0-kube-api-access-nw8hk\") pod \"nmstate-webhook-5f6d4c5ccb-w46s7\" (UID: \"0ef33e67-bcbf-4c68-87d4-5cc1db2e73d0\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-w46s7" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.541290 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/59543a69-801f-485f-b683-b9328aab396e-nmstate-lock\") pod \"nmstate-handler-cvm76\" (UID: \"59543a69-801f-485f-b683-b9328aab396e\") " pod="openshift-nmstate/nmstate-handler-cvm76" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.551378 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/0ef33e67-bcbf-4c68-87d4-5cc1db2e73d0-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-w46s7\" (UID: \"0ef33e67-bcbf-4c68-87d4-5cc1db2e73d0\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-w46s7" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.562141 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nw8hk\" (UniqueName: \"kubernetes.io/projected/0ef33e67-bcbf-4c68-87d4-5cc1db2e73d0-kube-api-access-nw8hk\") pod \"nmstate-webhook-5f6d4c5ccb-w46s7\" (UID: \"0ef33e67-bcbf-4c68-87d4-5cc1db2e73d0\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-w46s7" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.563360 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-794f7\" (UniqueName: \"kubernetes.io/projected/59543a69-801f-485f-b683-b9328aab396e-kube-api-access-794f7\") pod \"nmstate-handler-cvm76\" (UID: \"59543a69-801f-485f-b683-b9328aab396e\") " pod="openshift-nmstate/nmstate-handler-cvm76" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.566916 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4mcjq\" (UniqueName: 
\"kubernetes.io/projected/34e1a790-24ea-4564-8453-f525053ec5fa-kube-api-access-4mcjq\") pod \"nmstate-metrics-7f946cbc9-svbm7\" (UID: \"34e1a790-24ea-4564-8453-f525053ec5fa\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-svbm7" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.598406 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-cb4f878bb-962sh"] Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.599130 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-cb4f878bb-962sh" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.607975 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-cb4f878bb-962sh"] Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.631676 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-svbm7" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.642314 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m7dxp\" (UniqueName: \"kubernetes.io/projected/1609afe3-03e9-4bab-8ea5-444ffe47a8a0-kube-api-access-m7dxp\") pod \"nmstate-console-plugin-7fbb5f6569-gs72k\" (UID: \"1609afe3-03e9-4bab-8ea5-444ffe47a8a0\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gs72k" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.642365 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/1609afe3-03e9-4bab-8ea5-444ffe47a8a0-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-gs72k\" (UID: \"1609afe3-03e9-4bab-8ea5-444ffe47a8a0\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gs72k" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.642536 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/1609afe3-03e9-4bab-8ea5-444ffe47a8a0-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-gs72k\" (UID: \"1609afe3-03e9-4bab-8ea5-444ffe47a8a0\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gs72k" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.644169 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/1609afe3-03e9-4bab-8ea5-444ffe47a8a0-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-gs72k\" (UID: \"1609afe3-03e9-4bab-8ea5-444ffe47a8a0\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gs72k" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.646453 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/1609afe3-03e9-4bab-8ea5-444ffe47a8a0-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-gs72k\" (UID: \"1609afe3-03e9-4bab-8ea5-444ffe47a8a0\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gs72k" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.653790 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-w46s7" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.662308 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m7dxp\" (UniqueName: \"kubernetes.io/projected/1609afe3-03e9-4bab-8ea5-444ffe47a8a0-kube-api-access-m7dxp\") pod \"nmstate-console-plugin-7fbb5f6569-gs72k\" (UID: \"1609afe3-03e9-4bab-8ea5-444ffe47a8a0\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gs72k" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.674861 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-cvm76" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.743248 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/aed0b01d-03c9-4a3e-82b6-3bb7f3952deb-trusted-ca-bundle\") pod \"console-cb4f878bb-962sh\" (UID: \"aed0b01d-03c9-4a3e-82b6-3bb7f3952deb\") " pod="openshift-console/console-cb4f878bb-962sh" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.743303 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/aed0b01d-03c9-4a3e-82b6-3bb7f3952deb-service-ca\") pod \"console-cb4f878bb-962sh\" (UID: \"aed0b01d-03c9-4a3e-82b6-3bb7f3952deb\") " pod="openshift-console/console-cb4f878bb-962sh" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.743325 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/aed0b01d-03c9-4a3e-82b6-3bb7f3952deb-console-serving-cert\") pod \"console-cb4f878bb-962sh\" (UID: \"aed0b01d-03c9-4a3e-82b6-3bb7f3952deb\") " pod="openshift-console/console-cb4f878bb-962sh" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.743345 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6sk7z\" (UniqueName: \"kubernetes.io/projected/aed0b01d-03c9-4a3e-82b6-3bb7f3952deb-kube-api-access-6sk7z\") pod \"console-cb4f878bb-962sh\" (UID: \"aed0b01d-03c9-4a3e-82b6-3bb7f3952deb\") " pod="openshift-console/console-cb4f878bb-962sh" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.743385 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/aed0b01d-03c9-4a3e-82b6-3bb7f3952deb-console-oauth-config\") pod \"console-cb4f878bb-962sh\" (UID: \"aed0b01d-03c9-4a3e-82b6-3bb7f3952deb\") " pod="openshift-console/console-cb4f878bb-962sh" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.743409 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/aed0b01d-03c9-4a3e-82b6-3bb7f3952deb-oauth-serving-cert\") pod \"console-cb4f878bb-962sh\" (UID: \"aed0b01d-03c9-4a3e-82b6-3bb7f3952deb\") " pod="openshift-console/console-cb4f878bb-962sh" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.743437 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/aed0b01d-03c9-4a3e-82b6-3bb7f3952deb-console-config\") pod \"console-cb4f878bb-962sh\" (UID: \"aed0b01d-03c9-4a3e-82b6-3bb7f3952deb\") " 
pod="openshift-console/console-cb4f878bb-962sh" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.747282 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gs72k" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.844456 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/aed0b01d-03c9-4a3e-82b6-3bb7f3952deb-console-config\") pod \"console-cb4f878bb-962sh\" (UID: \"aed0b01d-03c9-4a3e-82b6-3bb7f3952deb\") " pod="openshift-console/console-cb4f878bb-962sh" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.845041 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/aed0b01d-03c9-4a3e-82b6-3bb7f3952deb-trusted-ca-bundle\") pod \"console-cb4f878bb-962sh\" (UID: \"aed0b01d-03c9-4a3e-82b6-3bb7f3952deb\") " pod="openshift-console/console-cb4f878bb-962sh" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.845091 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/aed0b01d-03c9-4a3e-82b6-3bb7f3952deb-service-ca\") pod \"console-cb4f878bb-962sh\" (UID: \"aed0b01d-03c9-4a3e-82b6-3bb7f3952deb\") " pod="openshift-console/console-cb4f878bb-962sh" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.845111 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/aed0b01d-03c9-4a3e-82b6-3bb7f3952deb-console-serving-cert\") pod \"console-cb4f878bb-962sh\" (UID: \"aed0b01d-03c9-4a3e-82b6-3bb7f3952deb\") " pod="openshift-console/console-cb4f878bb-962sh" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.845135 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6sk7z\" (UniqueName: \"kubernetes.io/projected/aed0b01d-03c9-4a3e-82b6-3bb7f3952deb-kube-api-access-6sk7z\") pod \"console-cb4f878bb-962sh\" (UID: \"aed0b01d-03c9-4a3e-82b6-3bb7f3952deb\") " pod="openshift-console/console-cb4f878bb-962sh" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.845191 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/aed0b01d-03c9-4a3e-82b6-3bb7f3952deb-console-oauth-config\") pod \"console-cb4f878bb-962sh\" (UID: \"aed0b01d-03c9-4a3e-82b6-3bb7f3952deb\") " pod="openshift-console/console-cb4f878bb-962sh" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.845213 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/aed0b01d-03c9-4a3e-82b6-3bb7f3952deb-oauth-serving-cert\") pod \"console-cb4f878bb-962sh\" (UID: \"aed0b01d-03c9-4a3e-82b6-3bb7f3952deb\") " pod="openshift-console/console-cb4f878bb-962sh" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.846568 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/aed0b01d-03c9-4a3e-82b6-3bb7f3952deb-oauth-serving-cert\") pod \"console-cb4f878bb-962sh\" (UID: \"aed0b01d-03c9-4a3e-82b6-3bb7f3952deb\") " pod="openshift-console/console-cb4f878bb-962sh" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.846869 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: 
\"kubernetes.io/configmap/aed0b01d-03c9-4a3e-82b6-3bb7f3952deb-service-ca\") pod \"console-cb4f878bb-962sh\" (UID: \"aed0b01d-03c9-4a3e-82b6-3bb7f3952deb\") " pod="openshift-console/console-cb4f878bb-962sh" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.846924 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/aed0b01d-03c9-4a3e-82b6-3bb7f3952deb-console-config\") pod \"console-cb4f878bb-962sh\" (UID: \"aed0b01d-03c9-4a3e-82b6-3bb7f3952deb\") " pod="openshift-console/console-cb4f878bb-962sh" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.849589 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/aed0b01d-03c9-4a3e-82b6-3bb7f3952deb-trusted-ca-bundle\") pod \"console-cb4f878bb-962sh\" (UID: \"aed0b01d-03c9-4a3e-82b6-3bb7f3952deb\") " pod="openshift-console/console-cb4f878bb-962sh" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.850323 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/aed0b01d-03c9-4a3e-82b6-3bb7f3952deb-console-oauth-config\") pod \"console-cb4f878bb-962sh\" (UID: \"aed0b01d-03c9-4a3e-82b6-3bb7f3952deb\") " pod="openshift-console/console-cb4f878bb-962sh" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.850439 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/aed0b01d-03c9-4a3e-82b6-3bb7f3952deb-console-serving-cert\") pod \"console-cb4f878bb-962sh\" (UID: \"aed0b01d-03c9-4a3e-82b6-3bb7f3952deb\") " pod="openshift-console/console-cb4f878bb-962sh" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.889135 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6sk7z\" (UniqueName: \"kubernetes.io/projected/aed0b01d-03c9-4a3e-82b6-3bb7f3952deb-kube-api-access-6sk7z\") pod \"console-cb4f878bb-962sh\" (UID: \"aed0b01d-03c9-4a3e-82b6-3bb7f3952deb\") " pod="openshift-console/console-cb4f878bb-962sh" Dec 02 18:51:31 crc kubenswrapper[4792]: I1202 18:51:31.930813 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-cb4f878bb-962sh" Dec 02 18:51:32 crc kubenswrapper[4792]: I1202 18:51:32.073469 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-svbm7"] Dec 02 18:51:32 crc kubenswrapper[4792]: W1202 18:51:32.075769 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod34e1a790_24ea_4564_8453_f525053ec5fa.slice/crio-767e5c7015edcbfabad5e71766369ab8bb86bb24878b8174912a352a7c489ac7 WatchSource:0}: Error finding container 767e5c7015edcbfabad5e71766369ab8bb86bb24878b8174912a352a7c489ac7: Status 404 returned error can't find the container with id 767e5c7015edcbfabad5e71766369ab8bb86bb24878b8174912a352a7c489ac7 Dec 02 18:51:32 crc kubenswrapper[4792]: I1202 18:51:32.151946 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-cb4f878bb-962sh"] Dec 02 18:51:32 crc kubenswrapper[4792]: W1202 18:51:32.156103 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaed0b01d_03c9_4a3e_82b6_3bb7f3952deb.slice/crio-c3bb8894b06b3a5d750e12abb96648ff2986b1ad034ff8181d49dde72e526c8a WatchSource:0}: Error finding container c3bb8894b06b3a5d750e12abb96648ff2986b1ad034ff8181d49dde72e526c8a: Status 404 returned error can't find the container with id c3bb8894b06b3a5d750e12abb96648ff2986b1ad034ff8181d49dde72e526c8a Dec 02 18:51:32 crc kubenswrapper[4792]: I1202 18:51:32.156719 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-w46s7"] Dec 02 18:51:32 crc kubenswrapper[4792]: W1202 18:51:32.165361 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0ef33e67_bcbf_4c68_87d4_5cc1db2e73d0.slice/crio-fd49448d41f1650648d051f021b2bd0a695a8c577c536c8af9503e85a93344d5 WatchSource:0}: Error finding container fd49448d41f1650648d051f021b2bd0a695a8c577c536c8af9503e85a93344d5: Status 404 returned error can't find the container with id fd49448d41f1650648d051f021b2bd0a695a8c577c536c8af9503e85a93344d5 Dec 02 18:51:32 crc kubenswrapper[4792]: I1202 18:51:32.269275 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gs72k"] Dec 02 18:51:32 crc kubenswrapper[4792]: W1202 18:51:32.273305 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1609afe3_03e9_4bab_8ea5_444ffe47a8a0.slice/crio-5c3f4f1ce1dbcafa3976be735508daffd8f47f137f9d8396a255ad96db50f4c9 WatchSource:0}: Error finding container 5c3f4f1ce1dbcafa3976be735508daffd8f47f137f9d8396a255ad96db50f4c9: Status 404 returned error can't find the container with id 5c3f4f1ce1dbcafa3976be735508daffd8f47f137f9d8396a255ad96db50f4c9 Dec 02 18:51:32 crc kubenswrapper[4792]: I1202 18:51:32.434994 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-cvm76" event={"ID":"59543a69-801f-485f-b683-b9328aab396e","Type":"ContainerStarted","Data":"56ca429788163866e6572133e9cfd70a2f79221e34b7c752c73295ab6b1aa72f"} Dec 02 18:51:32 crc kubenswrapper[4792]: I1202 18:51:32.437810 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-svbm7" 
event={"ID":"34e1a790-24ea-4564-8453-f525053ec5fa","Type":"ContainerStarted","Data":"767e5c7015edcbfabad5e71766369ab8bb86bb24878b8174912a352a7c489ac7"} Dec 02 18:51:32 crc kubenswrapper[4792]: I1202 18:51:32.439335 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gs72k" event={"ID":"1609afe3-03e9-4bab-8ea5-444ffe47a8a0","Type":"ContainerStarted","Data":"5c3f4f1ce1dbcafa3976be735508daffd8f47f137f9d8396a255ad96db50f4c9"} Dec 02 18:51:32 crc kubenswrapper[4792]: I1202 18:51:32.440396 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-cb4f878bb-962sh" event={"ID":"aed0b01d-03c9-4a3e-82b6-3bb7f3952deb","Type":"ContainerStarted","Data":"c3bb8894b06b3a5d750e12abb96648ff2986b1ad034ff8181d49dde72e526c8a"} Dec 02 18:51:32 crc kubenswrapper[4792]: I1202 18:51:32.442222 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-w46s7" event={"ID":"0ef33e67-bcbf-4c68-87d4-5cc1db2e73d0","Type":"ContainerStarted","Data":"fd49448d41f1650648d051f021b2bd0a695a8c577c536c8af9503e85a93344d5"} Dec 02 18:51:33 crc kubenswrapper[4792]: I1202 18:51:33.454130 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-cb4f878bb-962sh" event={"ID":"aed0b01d-03c9-4a3e-82b6-3bb7f3952deb","Type":"ContainerStarted","Data":"42e44e8c7e0a1a160e5fd095158ca41076df94fad13743c157a370fa416cdb56"} Dec 02 18:51:33 crc kubenswrapper[4792]: I1202 18:51:33.475923 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-cb4f878bb-962sh" podStartSLOduration=2.475883514 podStartE2EDuration="2.475883514s" podCreationTimestamp="2025-12-02 18:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:51:33.471308647 +0000 UTC m=+924.244200965" watchObservedRunningTime="2025-12-02 18:51:33.475883514 +0000 UTC m=+924.248775842" Dec 02 18:51:36 crc kubenswrapper[4792]: I1202 18:51:36.478698 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-w46s7" event={"ID":"0ef33e67-bcbf-4c68-87d4-5cc1db2e73d0","Type":"ContainerStarted","Data":"e1a1a39f598aae9056e87861567640ca410b5450964e077422a13a4b4044c59a"} Dec 02 18:51:36 crc kubenswrapper[4792]: I1202 18:51:36.479295 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-w46s7" Dec 02 18:51:36 crc kubenswrapper[4792]: I1202 18:51:36.480215 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-cvm76" event={"ID":"59543a69-801f-485f-b683-b9328aab396e","Type":"ContainerStarted","Data":"f1353b6d7157c047796993d4ea812af12b0b5b2f6716523caa3e93b34efd8468"} Dec 02 18:51:36 crc kubenswrapper[4792]: I1202 18:51:36.480376 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-cvm76" Dec 02 18:51:36 crc kubenswrapper[4792]: I1202 18:51:36.481945 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-svbm7" event={"ID":"34e1a790-24ea-4564-8453-f525053ec5fa","Type":"ContainerStarted","Data":"e4157eb270375f3905bcdf71e4393fc60602a739644ac389e25f027c30088abf"} Dec 02 18:51:36 crc kubenswrapper[4792]: I1202 18:51:36.483578 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gs72k" 
event={"ID":"1609afe3-03e9-4bab-8ea5-444ffe47a8a0","Type":"ContainerStarted","Data":"e9e1c0394f9fd91505da99da4dbbd57f1a9bac031e56422886e5733043d2675c"} Dec 02 18:51:36 crc kubenswrapper[4792]: I1202 18:51:36.495393 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-w46s7" podStartSLOduration=1.75324536 podStartE2EDuration="5.495368776s" podCreationTimestamp="2025-12-02 18:51:31 +0000 UTC" firstStartedPulling="2025-12-02 18:51:32.173939637 +0000 UTC m=+922.946831965" lastFinishedPulling="2025-12-02 18:51:35.916063023 +0000 UTC m=+926.688955381" observedRunningTime="2025-12-02 18:51:36.491720443 +0000 UTC m=+927.264612781" watchObservedRunningTime="2025-12-02 18:51:36.495368776 +0000 UTC m=+927.268261114" Dec 02 18:51:36 crc kubenswrapper[4792]: I1202 18:51:36.540647 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-cvm76" podStartSLOduration=1.300056625 podStartE2EDuration="5.540624218s" podCreationTimestamp="2025-12-02 18:51:31 +0000 UTC" firstStartedPulling="2025-12-02 18:51:31.706128517 +0000 UTC m=+922.479020845" lastFinishedPulling="2025-12-02 18:51:35.94669609 +0000 UTC m=+926.719588438" observedRunningTime="2025-12-02 18:51:36.519008993 +0000 UTC m=+927.291901321" watchObservedRunningTime="2025-12-02 18:51:36.540624218 +0000 UTC m=+927.313516566" Dec 02 18:51:36 crc kubenswrapper[4792]: I1202 18:51:36.544106 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gs72k" podStartSLOduration=1.906139606 podStartE2EDuration="5.544087427s" podCreationTimestamp="2025-12-02 18:51:31 +0000 UTC" firstStartedPulling="2025-12-02 18:51:32.276974763 +0000 UTC m=+923.049867101" lastFinishedPulling="2025-12-02 18:51:35.914922554 +0000 UTC m=+926.687814922" observedRunningTime="2025-12-02 18:51:36.536215485 +0000 UTC m=+927.309107813" watchObservedRunningTime="2025-12-02 18:51:36.544087427 +0000 UTC m=+927.316979795" Dec 02 18:51:38 crc kubenswrapper[4792]: I1202 18:51:38.082297 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 18:51:38 crc kubenswrapper[4792]: I1202 18:51:38.082735 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 18:51:39 crc kubenswrapper[4792]: I1202 18:51:39.504240 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-svbm7" event={"ID":"34e1a790-24ea-4564-8453-f525053ec5fa","Type":"ContainerStarted","Data":"9da78fe29e2877fef0a9172bcf568ce2e6b0b53afde36795526da99885189cce"} Dec 02 18:51:39 crc kubenswrapper[4792]: I1202 18:51:39.520014 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-svbm7" podStartSLOduration=1.542341076 podStartE2EDuration="8.519995001s" podCreationTimestamp="2025-12-02 18:51:31 +0000 UTC" firstStartedPulling="2025-12-02 18:51:32.078538978 +0000 UTC m=+922.851431306" lastFinishedPulling="2025-12-02 
18:51:39.056192883 +0000 UTC m=+929.829085231" observedRunningTime="2025-12-02 18:51:39.519445247 +0000 UTC m=+930.292337595" watchObservedRunningTime="2025-12-02 18:51:39.519995001 +0000 UTC m=+930.292887339" Dec 02 18:51:41 crc kubenswrapper[4792]: I1202 18:51:41.714230 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-cvm76" Dec 02 18:51:41 crc kubenswrapper[4792]: I1202 18:51:41.931850 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-cb4f878bb-962sh" Dec 02 18:51:41 crc kubenswrapper[4792]: I1202 18:51:41.932127 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-cb4f878bb-962sh" Dec 02 18:51:41 crc kubenswrapper[4792]: I1202 18:51:41.940074 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-cb4f878bb-962sh" Dec 02 18:51:42 crc kubenswrapper[4792]: I1202 18:51:42.531832 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-cb4f878bb-962sh" Dec 02 18:51:42 crc kubenswrapper[4792]: I1202 18:51:42.649118 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-cds6s"] Dec 02 18:51:51 crc kubenswrapper[4792]: I1202 18:51:51.663796 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-w46s7" Dec 02 18:52:07 crc kubenswrapper[4792]: I1202 18:52:07.700445 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-cds6s" podUID="95d5c817-e302-4f46-9db2-333b21486a7c" containerName="console" containerID="cri-o://aaafade72e80ab6a58561ed3d3c64daabe383dcdc7116646bf654ae7c0f16e1e" gracePeriod=15 Dec 02 18:52:08 crc kubenswrapper[4792]: I1202 18:52:08.081907 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 18:52:08 crc kubenswrapper[4792]: I1202 18:52:08.082183 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 18:52:08 crc kubenswrapper[4792]: I1202 18:52:08.206471 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-cds6s_95d5c817-e302-4f46-9db2-333b21486a7c/console/0.log" Dec 02 18:52:08 crc kubenswrapper[4792]: I1202 18:52:08.206560 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-cds6s" Dec 02 18:52:08 crc kubenswrapper[4792]: I1202 18:52:08.325997 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/95d5c817-e302-4f46-9db2-333b21486a7c-service-ca\") pod \"95d5c817-e302-4f46-9db2-333b21486a7c\" (UID: \"95d5c817-e302-4f46-9db2-333b21486a7c\") " Dec 02 18:52:08 crc kubenswrapper[4792]: I1202 18:52:08.326065 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/95d5c817-e302-4f46-9db2-333b21486a7c-console-serving-cert\") pod \"95d5c817-e302-4f46-9db2-333b21486a7c\" (UID: \"95d5c817-e302-4f46-9db2-333b21486a7c\") " Dec 02 18:52:08 crc kubenswrapper[4792]: I1202 18:52:08.326118 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/95d5c817-e302-4f46-9db2-333b21486a7c-console-config\") pod \"95d5c817-e302-4f46-9db2-333b21486a7c\" (UID: \"95d5c817-e302-4f46-9db2-333b21486a7c\") " Dec 02 18:52:08 crc kubenswrapper[4792]: I1202 18:52:08.326213 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xjxq7\" (UniqueName: \"kubernetes.io/projected/95d5c817-e302-4f46-9db2-333b21486a7c-kube-api-access-xjxq7\") pod \"95d5c817-e302-4f46-9db2-333b21486a7c\" (UID: \"95d5c817-e302-4f46-9db2-333b21486a7c\") " Dec 02 18:52:08 crc kubenswrapper[4792]: I1202 18:52:08.326242 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/95d5c817-e302-4f46-9db2-333b21486a7c-console-oauth-config\") pod \"95d5c817-e302-4f46-9db2-333b21486a7c\" (UID: \"95d5c817-e302-4f46-9db2-333b21486a7c\") " Dec 02 18:52:08 crc kubenswrapper[4792]: I1202 18:52:08.326276 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/95d5c817-e302-4f46-9db2-333b21486a7c-oauth-serving-cert\") pod \"95d5c817-e302-4f46-9db2-333b21486a7c\" (UID: \"95d5c817-e302-4f46-9db2-333b21486a7c\") " Dec 02 18:52:08 crc kubenswrapper[4792]: I1202 18:52:08.326326 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/95d5c817-e302-4f46-9db2-333b21486a7c-trusted-ca-bundle\") pod \"95d5c817-e302-4f46-9db2-333b21486a7c\" (UID: \"95d5c817-e302-4f46-9db2-333b21486a7c\") " Dec 02 18:52:08 crc kubenswrapper[4792]: I1202 18:52:08.326757 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/95d5c817-e302-4f46-9db2-333b21486a7c-service-ca" (OuterVolumeSpecName: "service-ca") pod "95d5c817-e302-4f46-9db2-333b21486a7c" (UID: "95d5c817-e302-4f46-9db2-333b21486a7c"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:52:08 crc kubenswrapper[4792]: I1202 18:52:08.327169 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/95d5c817-e302-4f46-9db2-333b21486a7c-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "95d5c817-e302-4f46-9db2-333b21486a7c" (UID: "95d5c817-e302-4f46-9db2-333b21486a7c"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:52:08 crc kubenswrapper[4792]: I1202 18:52:08.327832 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/95d5c817-e302-4f46-9db2-333b21486a7c-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "95d5c817-e302-4f46-9db2-333b21486a7c" (UID: "95d5c817-e302-4f46-9db2-333b21486a7c"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:52:08 crc kubenswrapper[4792]: I1202 18:52:08.328273 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/95d5c817-e302-4f46-9db2-333b21486a7c-console-config" (OuterVolumeSpecName: "console-config") pod "95d5c817-e302-4f46-9db2-333b21486a7c" (UID: "95d5c817-e302-4f46-9db2-333b21486a7c"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:52:08 crc kubenswrapper[4792]: I1202 18:52:08.332461 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/95d5c817-e302-4f46-9db2-333b21486a7c-kube-api-access-xjxq7" (OuterVolumeSpecName: "kube-api-access-xjxq7") pod "95d5c817-e302-4f46-9db2-333b21486a7c" (UID: "95d5c817-e302-4f46-9db2-333b21486a7c"). InnerVolumeSpecName "kube-api-access-xjxq7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:52:08 crc kubenswrapper[4792]: I1202 18:52:08.332792 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95d5c817-e302-4f46-9db2-333b21486a7c-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "95d5c817-e302-4f46-9db2-333b21486a7c" (UID: "95d5c817-e302-4f46-9db2-333b21486a7c"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:52:08 crc kubenswrapper[4792]: I1202 18:52:08.332832 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95d5c817-e302-4f46-9db2-333b21486a7c-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "95d5c817-e302-4f46-9db2-333b21486a7c" (UID: "95d5c817-e302-4f46-9db2-333b21486a7c"). InnerVolumeSpecName "console-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:52:08 crc kubenswrapper[4792]: I1202 18:52:08.427348 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xjxq7\" (UniqueName: \"kubernetes.io/projected/95d5c817-e302-4f46-9db2-333b21486a7c-kube-api-access-xjxq7\") on node \"crc\" DevicePath \"\"" Dec 02 18:52:08 crc kubenswrapper[4792]: I1202 18:52:08.427391 4792 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/95d5c817-e302-4f46-9db2-333b21486a7c-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:52:08 crc kubenswrapper[4792]: I1202 18:52:08.427405 4792 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/95d5c817-e302-4f46-9db2-333b21486a7c-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 18:52:08 crc kubenswrapper[4792]: I1202 18:52:08.427417 4792 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/95d5c817-e302-4f46-9db2-333b21486a7c-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 18:52:08 crc kubenswrapper[4792]: I1202 18:52:08.427428 4792 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/95d5c817-e302-4f46-9db2-333b21486a7c-service-ca\") on node \"crc\" DevicePath \"\"" Dec 02 18:52:08 crc kubenswrapper[4792]: I1202 18:52:08.427440 4792 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/95d5c817-e302-4f46-9db2-333b21486a7c-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 18:52:08 crc kubenswrapper[4792]: I1202 18:52:08.427451 4792 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/95d5c817-e302-4f46-9db2-333b21486a7c-console-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:52:08 crc kubenswrapper[4792]: I1202 18:52:08.727805 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-cds6s_95d5c817-e302-4f46-9db2-333b21486a7c/console/0.log" Dec 02 18:52:08 crc kubenswrapper[4792]: I1202 18:52:08.727897 4792 generic.go:334] "Generic (PLEG): container finished" podID="95d5c817-e302-4f46-9db2-333b21486a7c" containerID="aaafade72e80ab6a58561ed3d3c64daabe383dcdc7116646bf654ae7c0f16e1e" exitCode=2 Dec 02 18:52:08 crc kubenswrapper[4792]: I1202 18:52:08.727949 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-cds6s" event={"ID":"95d5c817-e302-4f46-9db2-333b21486a7c","Type":"ContainerDied","Data":"aaafade72e80ab6a58561ed3d3c64daabe383dcdc7116646bf654ae7c0f16e1e"} Dec 02 18:52:08 crc kubenswrapper[4792]: I1202 18:52:08.727995 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-cds6s" event={"ID":"95d5c817-e302-4f46-9db2-333b21486a7c","Type":"ContainerDied","Data":"57cb29eca3f408a62a2f48e72eec76b83bd73a0288a8c2ea02f3e8db8f116018"} Dec 02 18:52:08 crc kubenswrapper[4792]: I1202 18:52:08.728036 4792 scope.go:117] "RemoveContainer" containerID="aaafade72e80ab6a58561ed3d3c64daabe383dcdc7116646bf654ae7c0f16e1e" Dec 02 18:52:08 crc kubenswrapper[4792]: I1202 18:52:08.728249 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-cds6s" Dec 02 18:52:08 crc kubenswrapper[4792]: I1202 18:52:08.767624 4792 scope.go:117] "RemoveContainer" containerID="aaafade72e80ab6a58561ed3d3c64daabe383dcdc7116646bf654ae7c0f16e1e" Dec 02 18:52:08 crc kubenswrapper[4792]: E1202 18:52:08.768993 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aaafade72e80ab6a58561ed3d3c64daabe383dcdc7116646bf654ae7c0f16e1e\": container with ID starting with aaafade72e80ab6a58561ed3d3c64daabe383dcdc7116646bf654ae7c0f16e1e not found: ID does not exist" containerID="aaafade72e80ab6a58561ed3d3c64daabe383dcdc7116646bf654ae7c0f16e1e" Dec 02 18:52:08 crc kubenswrapper[4792]: I1202 18:52:08.769073 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aaafade72e80ab6a58561ed3d3c64daabe383dcdc7116646bf654ae7c0f16e1e"} err="failed to get container status \"aaafade72e80ab6a58561ed3d3c64daabe383dcdc7116646bf654ae7c0f16e1e\": rpc error: code = NotFound desc = could not find container \"aaafade72e80ab6a58561ed3d3c64daabe383dcdc7116646bf654ae7c0f16e1e\": container with ID starting with aaafade72e80ab6a58561ed3d3c64daabe383dcdc7116646bf654ae7c0f16e1e not found: ID does not exist" Dec 02 18:52:08 crc kubenswrapper[4792]: I1202 18:52:08.788878 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-cds6s"] Dec 02 18:52:08 crc kubenswrapper[4792]: I1202 18:52:08.796546 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-cds6s"] Dec 02 18:52:09 crc kubenswrapper[4792]: I1202 18:52:09.528428 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832dqg7"] Dec 02 18:52:09 crc kubenswrapper[4792]: E1202 18:52:09.528703 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95d5c817-e302-4f46-9db2-333b21486a7c" containerName="console" Dec 02 18:52:09 crc kubenswrapper[4792]: I1202 18:52:09.528715 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="95d5c817-e302-4f46-9db2-333b21486a7c" containerName="console" Dec 02 18:52:09 crc kubenswrapper[4792]: I1202 18:52:09.528835 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="95d5c817-e302-4f46-9db2-333b21486a7c" containerName="console" Dec 02 18:52:09 crc kubenswrapper[4792]: I1202 18:52:09.529713 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832dqg7" Dec 02 18:52:09 crc kubenswrapper[4792]: I1202 18:52:09.532774 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 02 18:52:09 crc kubenswrapper[4792]: I1202 18:52:09.549580 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="95d5c817-e302-4f46-9db2-333b21486a7c" path="/var/lib/kubelet/pods/95d5c817-e302-4f46-9db2-333b21486a7c/volumes" Dec 02 18:52:09 crc kubenswrapper[4792]: I1202 18:52:09.550010 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832dqg7"] Dec 02 18:52:09 crc kubenswrapper[4792]: I1202 18:52:09.645716 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5bdd4894-7306-472f-ae3d-2d1a55966015-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832dqg7\" (UID: \"5bdd4894-7306-472f-ae3d-2d1a55966015\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832dqg7" Dec 02 18:52:09 crc kubenswrapper[4792]: I1202 18:52:09.645760 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bcgnb\" (UniqueName: \"kubernetes.io/projected/5bdd4894-7306-472f-ae3d-2d1a55966015-kube-api-access-bcgnb\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832dqg7\" (UID: \"5bdd4894-7306-472f-ae3d-2d1a55966015\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832dqg7" Dec 02 18:52:09 crc kubenswrapper[4792]: I1202 18:52:09.645798 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5bdd4894-7306-472f-ae3d-2d1a55966015-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832dqg7\" (UID: \"5bdd4894-7306-472f-ae3d-2d1a55966015\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832dqg7" Dec 02 18:52:09 crc kubenswrapper[4792]: I1202 18:52:09.747216 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5bdd4894-7306-472f-ae3d-2d1a55966015-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832dqg7\" (UID: \"5bdd4894-7306-472f-ae3d-2d1a55966015\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832dqg7" Dec 02 18:52:09 crc kubenswrapper[4792]: I1202 18:52:09.747259 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bcgnb\" (UniqueName: \"kubernetes.io/projected/5bdd4894-7306-472f-ae3d-2d1a55966015-kube-api-access-bcgnb\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832dqg7\" (UID: \"5bdd4894-7306-472f-ae3d-2d1a55966015\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832dqg7" Dec 02 18:52:09 crc kubenswrapper[4792]: I1202 18:52:09.747297 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5bdd4894-7306-472f-ae3d-2d1a55966015-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832dqg7\" (UID: \"5bdd4894-7306-472f-ae3d-2d1a55966015\") " 
pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832dqg7" Dec 02 18:52:09 crc kubenswrapper[4792]: I1202 18:52:09.747722 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5bdd4894-7306-472f-ae3d-2d1a55966015-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832dqg7\" (UID: \"5bdd4894-7306-472f-ae3d-2d1a55966015\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832dqg7" Dec 02 18:52:09 crc kubenswrapper[4792]: I1202 18:52:09.747917 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5bdd4894-7306-472f-ae3d-2d1a55966015-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832dqg7\" (UID: \"5bdd4894-7306-472f-ae3d-2d1a55966015\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832dqg7" Dec 02 18:52:09 crc kubenswrapper[4792]: I1202 18:52:09.777935 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bcgnb\" (UniqueName: \"kubernetes.io/projected/5bdd4894-7306-472f-ae3d-2d1a55966015-kube-api-access-bcgnb\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832dqg7\" (UID: \"5bdd4894-7306-472f-ae3d-2d1a55966015\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832dqg7" Dec 02 18:52:09 crc kubenswrapper[4792]: I1202 18:52:09.845250 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832dqg7" Dec 02 18:52:10 crc kubenswrapper[4792]: I1202 18:52:10.330517 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832dqg7"] Dec 02 18:52:10 crc kubenswrapper[4792]: W1202 18:52:10.334494 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5bdd4894_7306_472f_ae3d_2d1a55966015.slice/crio-6c236cf5948a1f697a4c0c89911a086e48c59a4e9944177d09e754750128b7cb WatchSource:0}: Error finding container 6c236cf5948a1f697a4c0c89911a086e48c59a4e9944177d09e754750128b7cb: Status 404 returned error can't find the container with id 6c236cf5948a1f697a4c0c89911a086e48c59a4e9944177d09e754750128b7cb Dec 02 18:52:10 crc kubenswrapper[4792]: I1202 18:52:10.754272 4792 generic.go:334] "Generic (PLEG): container finished" podID="5bdd4894-7306-472f-ae3d-2d1a55966015" containerID="42295dd8091abb691e3b47529e97fd33f50c79bb74d3097e0d1fdcae863494dc" exitCode=0 Dec 02 18:52:10 crc kubenswrapper[4792]: I1202 18:52:10.755120 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832dqg7" event={"ID":"5bdd4894-7306-472f-ae3d-2d1a55966015","Type":"ContainerDied","Data":"42295dd8091abb691e3b47529e97fd33f50c79bb74d3097e0d1fdcae863494dc"} Dec 02 18:52:10 crc kubenswrapper[4792]: I1202 18:52:10.756130 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832dqg7" event={"ID":"5bdd4894-7306-472f-ae3d-2d1a55966015","Type":"ContainerStarted","Data":"6c236cf5948a1f697a4c0c89911a086e48c59a4e9944177d09e754750128b7cb"} Dec 02 18:52:12 crc kubenswrapper[4792]: I1202 18:52:12.768871 4792 generic.go:334] "Generic (PLEG): container finished" 
podID="5bdd4894-7306-472f-ae3d-2d1a55966015" containerID="028a6a925db6fdc4d8de5a4564f4bde56501ebaf503a1371b55fd1555c905757" exitCode=0 Dec 02 18:52:12 crc kubenswrapper[4792]: I1202 18:52:12.768942 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832dqg7" event={"ID":"5bdd4894-7306-472f-ae3d-2d1a55966015","Type":"ContainerDied","Data":"028a6a925db6fdc4d8de5a4564f4bde56501ebaf503a1371b55fd1555c905757"} Dec 02 18:52:13 crc kubenswrapper[4792]: I1202 18:52:13.777197 4792 generic.go:334] "Generic (PLEG): container finished" podID="5bdd4894-7306-472f-ae3d-2d1a55966015" containerID="ae4d6af7451327c9047a1d5f53500e63aeadad3ce0c28cafc60b1a14ad3e7f74" exitCode=0 Dec 02 18:52:13 crc kubenswrapper[4792]: I1202 18:52:13.777269 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832dqg7" event={"ID":"5bdd4894-7306-472f-ae3d-2d1a55966015","Type":"ContainerDied","Data":"ae4d6af7451327c9047a1d5f53500e63aeadad3ce0c28cafc60b1a14ad3e7f74"} Dec 02 18:52:15 crc kubenswrapper[4792]: I1202 18:52:15.064139 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832dqg7" Dec 02 18:52:15 crc kubenswrapper[4792]: I1202 18:52:15.128634 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bcgnb\" (UniqueName: \"kubernetes.io/projected/5bdd4894-7306-472f-ae3d-2d1a55966015-kube-api-access-bcgnb\") pod \"5bdd4894-7306-472f-ae3d-2d1a55966015\" (UID: \"5bdd4894-7306-472f-ae3d-2d1a55966015\") " Dec 02 18:52:15 crc kubenswrapper[4792]: I1202 18:52:15.128752 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5bdd4894-7306-472f-ae3d-2d1a55966015-bundle\") pod \"5bdd4894-7306-472f-ae3d-2d1a55966015\" (UID: \"5bdd4894-7306-472f-ae3d-2d1a55966015\") " Dec 02 18:52:15 crc kubenswrapper[4792]: I1202 18:52:15.128906 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5bdd4894-7306-472f-ae3d-2d1a55966015-util\") pod \"5bdd4894-7306-472f-ae3d-2d1a55966015\" (UID: \"5bdd4894-7306-472f-ae3d-2d1a55966015\") " Dec 02 18:52:15 crc kubenswrapper[4792]: I1202 18:52:15.130538 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5bdd4894-7306-472f-ae3d-2d1a55966015-bundle" (OuterVolumeSpecName: "bundle") pod "5bdd4894-7306-472f-ae3d-2d1a55966015" (UID: "5bdd4894-7306-472f-ae3d-2d1a55966015"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:52:15 crc kubenswrapper[4792]: I1202 18:52:15.143739 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5bdd4894-7306-472f-ae3d-2d1a55966015-kube-api-access-bcgnb" (OuterVolumeSpecName: "kube-api-access-bcgnb") pod "5bdd4894-7306-472f-ae3d-2d1a55966015" (UID: "5bdd4894-7306-472f-ae3d-2d1a55966015"). InnerVolumeSpecName "kube-api-access-bcgnb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:52:15 crc kubenswrapper[4792]: I1202 18:52:15.148321 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5bdd4894-7306-472f-ae3d-2d1a55966015-util" (OuterVolumeSpecName: "util") pod "5bdd4894-7306-472f-ae3d-2d1a55966015" (UID: "5bdd4894-7306-472f-ae3d-2d1a55966015"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:52:15 crc kubenswrapper[4792]: I1202 18:52:15.230570 4792 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5bdd4894-7306-472f-ae3d-2d1a55966015-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 18:52:15 crc kubenswrapper[4792]: I1202 18:52:15.230935 4792 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5bdd4894-7306-472f-ae3d-2d1a55966015-util\") on node \"crc\" DevicePath \"\"" Dec 02 18:52:15 crc kubenswrapper[4792]: I1202 18:52:15.231075 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bcgnb\" (UniqueName: \"kubernetes.io/projected/5bdd4894-7306-472f-ae3d-2d1a55966015-kube-api-access-bcgnb\") on node \"crc\" DevicePath \"\"" Dec 02 18:52:15 crc kubenswrapper[4792]: I1202 18:52:15.794882 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832dqg7" event={"ID":"5bdd4894-7306-472f-ae3d-2d1a55966015","Type":"ContainerDied","Data":"6c236cf5948a1f697a4c0c89911a086e48c59a4e9944177d09e754750128b7cb"} Dec 02 18:52:15 crc kubenswrapper[4792]: I1202 18:52:15.795483 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6c236cf5948a1f697a4c0c89911a086e48c59a4e9944177d09e754750128b7cb" Dec 02 18:52:15 crc kubenswrapper[4792]: I1202 18:52:15.795021 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832dqg7" Dec 02 18:52:28 crc kubenswrapper[4792]: I1202 18:52:28.539461 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-7d46f7f9d9-8xxzl"] Dec 02 18:52:28 crc kubenswrapper[4792]: E1202 18:52:28.540264 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5bdd4894-7306-472f-ae3d-2d1a55966015" containerName="pull" Dec 02 18:52:28 crc kubenswrapper[4792]: I1202 18:52:28.540277 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="5bdd4894-7306-472f-ae3d-2d1a55966015" containerName="pull" Dec 02 18:52:28 crc kubenswrapper[4792]: E1202 18:52:28.540289 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5bdd4894-7306-472f-ae3d-2d1a55966015" containerName="util" Dec 02 18:52:28 crc kubenswrapper[4792]: I1202 18:52:28.540296 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="5bdd4894-7306-472f-ae3d-2d1a55966015" containerName="util" Dec 02 18:52:28 crc kubenswrapper[4792]: E1202 18:52:28.540316 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5bdd4894-7306-472f-ae3d-2d1a55966015" containerName="extract" Dec 02 18:52:28 crc kubenswrapper[4792]: I1202 18:52:28.540323 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="5bdd4894-7306-472f-ae3d-2d1a55966015" containerName="extract" Dec 02 18:52:28 crc kubenswrapper[4792]: I1202 18:52:28.540452 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="5bdd4894-7306-472f-ae3d-2d1a55966015" containerName="extract" Dec 02 18:52:28 crc kubenswrapper[4792]: I1202 18:52:28.540890 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-7d46f7f9d9-8xxzl" Dec 02 18:52:28 crc kubenswrapper[4792]: I1202 18:52:28.542637 4792 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Dec 02 18:52:28 crc kubenswrapper[4792]: I1202 18:52:28.543104 4792 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-gt5h8" Dec 02 18:52:28 crc kubenswrapper[4792]: I1202 18:52:28.543202 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Dec 02 18:52:28 crc kubenswrapper[4792]: I1202 18:52:28.543411 4792 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Dec 02 18:52:28 crc kubenswrapper[4792]: I1202 18:52:28.543659 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Dec 02 18:52:28 crc kubenswrapper[4792]: I1202 18:52:28.551172 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-7d46f7f9d9-8xxzl"] Dec 02 18:52:28 crc kubenswrapper[4792]: I1202 18:52:28.646427 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6ba2dc70-5519-41e5-b9fe-57508fd8e395-webhook-cert\") pod \"metallb-operator-controller-manager-7d46f7f9d9-8xxzl\" (UID: \"6ba2dc70-5519-41e5-b9fe-57508fd8e395\") " pod="metallb-system/metallb-operator-controller-manager-7d46f7f9d9-8xxzl" Dec 02 18:52:28 crc kubenswrapper[4792]: I1202 18:52:28.646507 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6ba2dc70-5519-41e5-b9fe-57508fd8e395-apiservice-cert\") pod \"metallb-operator-controller-manager-7d46f7f9d9-8xxzl\" (UID: \"6ba2dc70-5519-41e5-b9fe-57508fd8e395\") " pod="metallb-system/metallb-operator-controller-manager-7d46f7f9d9-8xxzl" Dec 02 18:52:28 crc kubenswrapper[4792]: I1202 18:52:28.646640 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxnmt\" (UniqueName: \"kubernetes.io/projected/6ba2dc70-5519-41e5-b9fe-57508fd8e395-kube-api-access-kxnmt\") pod \"metallb-operator-controller-manager-7d46f7f9d9-8xxzl\" (UID: \"6ba2dc70-5519-41e5-b9fe-57508fd8e395\") " pod="metallb-system/metallb-operator-controller-manager-7d46f7f9d9-8xxzl" Dec 02 18:52:28 crc kubenswrapper[4792]: I1202 18:52:28.747979 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxnmt\" (UniqueName: \"kubernetes.io/projected/6ba2dc70-5519-41e5-b9fe-57508fd8e395-kube-api-access-kxnmt\") pod \"metallb-operator-controller-manager-7d46f7f9d9-8xxzl\" (UID: \"6ba2dc70-5519-41e5-b9fe-57508fd8e395\") " pod="metallb-system/metallb-operator-controller-manager-7d46f7f9d9-8xxzl" Dec 02 18:52:28 crc kubenswrapper[4792]: I1202 18:52:28.748092 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6ba2dc70-5519-41e5-b9fe-57508fd8e395-webhook-cert\") pod \"metallb-operator-controller-manager-7d46f7f9d9-8xxzl\" (UID: \"6ba2dc70-5519-41e5-b9fe-57508fd8e395\") " pod="metallb-system/metallb-operator-controller-manager-7d46f7f9d9-8xxzl" Dec 02 18:52:28 crc kubenswrapper[4792]: I1202 18:52:28.748140 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6ba2dc70-5519-41e5-b9fe-57508fd8e395-apiservice-cert\") pod \"metallb-operator-controller-manager-7d46f7f9d9-8xxzl\" (UID: \"6ba2dc70-5519-41e5-b9fe-57508fd8e395\") " pod="metallb-system/metallb-operator-controller-manager-7d46f7f9d9-8xxzl" Dec 02 18:52:28 crc kubenswrapper[4792]: I1202 18:52:28.759627 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6ba2dc70-5519-41e5-b9fe-57508fd8e395-webhook-cert\") pod \"metallb-operator-controller-manager-7d46f7f9d9-8xxzl\" (UID: \"6ba2dc70-5519-41e5-b9fe-57508fd8e395\") " pod="metallb-system/metallb-operator-controller-manager-7d46f7f9d9-8xxzl" Dec 02 18:52:28 crc kubenswrapper[4792]: I1202 18:52:28.760505 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6ba2dc70-5519-41e5-b9fe-57508fd8e395-apiservice-cert\") pod \"metallb-operator-controller-manager-7d46f7f9d9-8xxzl\" (UID: \"6ba2dc70-5519-41e5-b9fe-57508fd8e395\") " pod="metallb-system/metallb-operator-controller-manager-7d46f7f9d9-8xxzl" Dec 02 18:52:28 crc kubenswrapper[4792]: I1202 18:52:28.775427 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxnmt\" (UniqueName: \"kubernetes.io/projected/6ba2dc70-5519-41e5-b9fe-57508fd8e395-kube-api-access-kxnmt\") pod \"metallb-operator-controller-manager-7d46f7f9d9-8xxzl\" (UID: \"6ba2dc70-5519-41e5-b9fe-57508fd8e395\") " pod="metallb-system/metallb-operator-controller-manager-7d46f7f9d9-8xxzl" Dec 02 18:52:28 crc kubenswrapper[4792]: I1202 18:52:28.862126 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-7d46f7f9d9-8xxzl" Dec 02 18:52:28 crc kubenswrapper[4792]: I1202 18:52:28.960306 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-6974b4d988-96g88"] Dec 02 18:52:28 crc kubenswrapper[4792]: I1202 18:52:28.961316 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6974b4d988-96g88" Dec 02 18:52:28 crc kubenswrapper[4792]: I1202 18:52:28.966310 4792 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 02 18:52:28 crc kubenswrapper[4792]: I1202 18:52:28.966323 4792 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-zkv6t" Dec 02 18:52:28 crc kubenswrapper[4792]: I1202 18:52:28.966586 4792 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Dec 02 18:52:29 crc kubenswrapper[4792]: I1202 18:52:29.014284 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6974b4d988-96g88"] Dec 02 18:52:29 crc kubenswrapper[4792]: I1202 18:52:29.152213 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/11d281b1-8805-4a7a-b234-53a4e52ae307-webhook-cert\") pod \"metallb-operator-webhook-server-6974b4d988-96g88\" (UID: \"11d281b1-8805-4a7a-b234-53a4e52ae307\") " pod="metallb-system/metallb-operator-webhook-server-6974b4d988-96g88" Dec 02 18:52:29 crc kubenswrapper[4792]: I1202 18:52:29.152343 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/11d281b1-8805-4a7a-b234-53a4e52ae307-apiservice-cert\") pod \"metallb-operator-webhook-server-6974b4d988-96g88\" (UID: \"11d281b1-8805-4a7a-b234-53a4e52ae307\") " pod="metallb-system/metallb-operator-webhook-server-6974b4d988-96g88" Dec 02 18:52:29 crc kubenswrapper[4792]: I1202 18:52:29.152474 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qwt76\" (UniqueName: \"kubernetes.io/projected/11d281b1-8805-4a7a-b234-53a4e52ae307-kube-api-access-qwt76\") pod \"metallb-operator-webhook-server-6974b4d988-96g88\" (UID: \"11d281b1-8805-4a7a-b234-53a4e52ae307\") " pod="metallb-system/metallb-operator-webhook-server-6974b4d988-96g88" Dec 02 18:52:29 crc kubenswrapper[4792]: I1202 18:52:29.153451 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-7d46f7f9d9-8xxzl"] Dec 02 18:52:29 crc kubenswrapper[4792]: W1202 18:52:29.159748 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6ba2dc70_5519_41e5_b9fe_57508fd8e395.slice/crio-39ef640a5b24c3174c4d6f8d69fc539f9f07fd1bb3c3a9564288714c02098978 WatchSource:0}: Error finding container 39ef640a5b24c3174c4d6f8d69fc539f9f07fd1bb3c3a9564288714c02098978: Status 404 returned error can't find the container with id 39ef640a5b24c3174c4d6f8d69fc539f9f07fd1bb3c3a9564288714c02098978 Dec 02 18:52:29 crc kubenswrapper[4792]: I1202 18:52:29.254111 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: 
\"kubernetes.io/secret/11d281b1-8805-4a7a-b234-53a4e52ae307-webhook-cert\") pod \"metallb-operator-webhook-server-6974b4d988-96g88\" (UID: \"11d281b1-8805-4a7a-b234-53a4e52ae307\") " pod="metallb-system/metallb-operator-webhook-server-6974b4d988-96g88" Dec 02 18:52:29 crc kubenswrapper[4792]: I1202 18:52:29.254635 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/11d281b1-8805-4a7a-b234-53a4e52ae307-apiservice-cert\") pod \"metallb-operator-webhook-server-6974b4d988-96g88\" (UID: \"11d281b1-8805-4a7a-b234-53a4e52ae307\") " pod="metallb-system/metallb-operator-webhook-server-6974b4d988-96g88" Dec 02 18:52:29 crc kubenswrapper[4792]: I1202 18:52:29.254680 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qwt76\" (UniqueName: \"kubernetes.io/projected/11d281b1-8805-4a7a-b234-53a4e52ae307-kube-api-access-qwt76\") pod \"metallb-operator-webhook-server-6974b4d988-96g88\" (UID: \"11d281b1-8805-4a7a-b234-53a4e52ae307\") " pod="metallb-system/metallb-operator-webhook-server-6974b4d988-96g88" Dec 02 18:52:29 crc kubenswrapper[4792]: I1202 18:52:29.261682 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/11d281b1-8805-4a7a-b234-53a4e52ae307-apiservice-cert\") pod \"metallb-operator-webhook-server-6974b4d988-96g88\" (UID: \"11d281b1-8805-4a7a-b234-53a4e52ae307\") " pod="metallb-system/metallb-operator-webhook-server-6974b4d988-96g88" Dec 02 18:52:29 crc kubenswrapper[4792]: I1202 18:52:29.262298 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/11d281b1-8805-4a7a-b234-53a4e52ae307-webhook-cert\") pod \"metallb-operator-webhook-server-6974b4d988-96g88\" (UID: \"11d281b1-8805-4a7a-b234-53a4e52ae307\") " pod="metallb-system/metallb-operator-webhook-server-6974b4d988-96g88" Dec 02 18:52:29 crc kubenswrapper[4792]: I1202 18:52:29.274963 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qwt76\" (UniqueName: \"kubernetes.io/projected/11d281b1-8805-4a7a-b234-53a4e52ae307-kube-api-access-qwt76\") pod \"metallb-operator-webhook-server-6974b4d988-96g88\" (UID: \"11d281b1-8805-4a7a-b234-53a4e52ae307\") " pod="metallb-system/metallb-operator-webhook-server-6974b4d988-96g88" Dec 02 18:52:29 crc kubenswrapper[4792]: I1202 18:52:29.280880 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6974b4d988-96g88" Dec 02 18:52:29 crc kubenswrapper[4792]: I1202 18:52:29.520455 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6974b4d988-96g88"] Dec 02 18:52:29 crc kubenswrapper[4792]: W1202 18:52:29.521414 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod11d281b1_8805_4a7a_b234_53a4e52ae307.slice/crio-190549b92d18b1bffa27d6403e110efd1a77f15b5ec4111017dac60f4f27f8f5 WatchSource:0}: Error finding container 190549b92d18b1bffa27d6403e110efd1a77f15b5ec4111017dac60f4f27f8f5: Status 404 returned error can't find the container with id 190549b92d18b1bffa27d6403e110efd1a77f15b5ec4111017dac60f4f27f8f5 Dec 02 18:52:29 crc kubenswrapper[4792]: I1202 18:52:29.903968 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6974b4d988-96g88" event={"ID":"11d281b1-8805-4a7a-b234-53a4e52ae307","Type":"ContainerStarted","Data":"190549b92d18b1bffa27d6403e110efd1a77f15b5ec4111017dac60f4f27f8f5"} Dec 02 18:52:29 crc kubenswrapper[4792]: I1202 18:52:29.905414 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-7d46f7f9d9-8xxzl" event={"ID":"6ba2dc70-5519-41e5-b9fe-57508fd8e395","Type":"ContainerStarted","Data":"39ef640a5b24c3174c4d6f8d69fc539f9f07fd1bb3c3a9564288714c02098978"} Dec 02 18:52:32 crc kubenswrapper[4792]: I1202 18:52:32.939068 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-7d46f7f9d9-8xxzl" event={"ID":"6ba2dc70-5519-41e5-b9fe-57508fd8e395","Type":"ContainerStarted","Data":"dc96475368cfcf5feeaf210a7f5c37a600f9c4c842c0db0245b3952caa5ee448"} Dec 02 18:52:32 crc kubenswrapper[4792]: I1202 18:52:32.939578 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-7d46f7f9d9-8xxzl" Dec 02 18:52:32 crc kubenswrapper[4792]: I1202 18:52:32.963507 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-7d46f7f9d9-8xxzl" podStartSLOduration=1.5641702309999999 podStartE2EDuration="4.963489474s" podCreationTimestamp="2025-12-02 18:52:28 +0000 UTC" firstStartedPulling="2025-12-02 18:52:29.162124368 +0000 UTC m=+979.935016696" lastFinishedPulling="2025-12-02 18:52:32.561443611 +0000 UTC m=+983.334335939" observedRunningTime="2025-12-02 18:52:32.960789234 +0000 UTC m=+983.733681562" watchObservedRunningTime="2025-12-02 18:52:32.963489474 +0000 UTC m=+983.736381802" Dec 02 18:52:34 crc kubenswrapper[4792]: I1202 18:52:34.958679 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6974b4d988-96g88" event={"ID":"11d281b1-8805-4a7a-b234-53a4e52ae307","Type":"ContainerStarted","Data":"1c65545b5998a0e9051f7cd8f030559ddc37bc94a25c29b89e8b79ec19e61dd9"} Dec 02 18:52:34 crc kubenswrapper[4792]: I1202 18:52:34.959066 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-6974b4d988-96g88" Dec 02 18:52:34 crc kubenswrapper[4792]: I1202 18:52:34.998319 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-6974b4d988-96g88" podStartSLOduration=1.880000154 podStartE2EDuration="6.998290326s" 
podCreationTimestamp="2025-12-02 18:52:28 +0000 UTC" firstStartedPulling="2025-12-02 18:52:29.524150941 +0000 UTC m=+980.297043289" lastFinishedPulling="2025-12-02 18:52:34.642441113 +0000 UTC m=+985.415333461" observedRunningTime="2025-12-02 18:52:34.995793291 +0000 UTC m=+985.768685679" watchObservedRunningTime="2025-12-02 18:52:34.998290326 +0000 UTC m=+985.771182694" Dec 02 18:52:38 crc kubenswrapper[4792]: I1202 18:52:38.081084 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 18:52:38 crc kubenswrapper[4792]: I1202 18:52:38.081476 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 18:52:38 crc kubenswrapper[4792]: I1202 18:52:38.081584 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" Dec 02 18:52:38 crc kubenswrapper[4792]: I1202 18:52:38.082424 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"51777911bdc2fd4cba93567e9367b9a4b299d12dfa4458a4ce8aa3d35773b2ea"} pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 18:52:38 crc kubenswrapper[4792]: I1202 18:52:38.082556 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" containerID="cri-o://51777911bdc2fd4cba93567e9367b9a4b299d12dfa4458a4ce8aa3d35773b2ea" gracePeriod=600 Dec 02 18:52:38 crc kubenswrapper[4792]: I1202 18:52:38.986332 4792 generic.go:334] "Generic (PLEG): container finished" podID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerID="51777911bdc2fd4cba93567e9367b9a4b299d12dfa4458a4ce8aa3d35773b2ea" exitCode=0 Dec 02 18:52:38 crc kubenswrapper[4792]: I1202 18:52:38.986395 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" event={"ID":"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f","Type":"ContainerDied","Data":"51777911bdc2fd4cba93567e9367b9a4b299d12dfa4458a4ce8aa3d35773b2ea"} Dec 02 18:52:38 crc kubenswrapper[4792]: I1202 18:52:38.986972 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" event={"ID":"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f","Type":"ContainerStarted","Data":"38cdfcc4ac221e244e725dd6d1a5012b531f01f9bc318459a440b509a9410447"} Dec 02 18:52:38 crc kubenswrapper[4792]: I1202 18:52:38.987002 4792 scope.go:117] "RemoveContainer" containerID="2d504f78e5a425b84ad94074fbe4a535e7551511fbcfda776b0f6a3eefb2619a" Dec 02 18:52:49 crc kubenswrapper[4792]: I1202 18:52:49.293637 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-6974b4d988-96g88" Dec 02 18:53:05 crc kubenswrapper[4792]: I1202 18:53:05.321778 4792 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-pmltd"] Dec 02 18:53:05 crc kubenswrapper[4792]: I1202 18:53:05.324917 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pmltd" Dec 02 18:53:05 crc kubenswrapper[4792]: I1202 18:53:05.339427 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pmltd"] Dec 02 18:53:05 crc kubenswrapper[4792]: I1202 18:53:05.479601 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb074f17-56fa-423c-958b-f37564b8433a-catalog-content\") pod \"certified-operators-pmltd\" (UID: \"bb074f17-56fa-423c-958b-f37564b8433a\") " pod="openshift-marketplace/certified-operators-pmltd" Dec 02 18:53:05 crc kubenswrapper[4792]: I1202 18:53:05.479676 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5db4j\" (UniqueName: \"kubernetes.io/projected/bb074f17-56fa-423c-958b-f37564b8433a-kube-api-access-5db4j\") pod \"certified-operators-pmltd\" (UID: \"bb074f17-56fa-423c-958b-f37564b8433a\") " pod="openshift-marketplace/certified-operators-pmltd" Dec 02 18:53:05 crc kubenswrapper[4792]: I1202 18:53:05.479750 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb074f17-56fa-423c-958b-f37564b8433a-utilities\") pod \"certified-operators-pmltd\" (UID: \"bb074f17-56fa-423c-958b-f37564b8433a\") " pod="openshift-marketplace/certified-operators-pmltd" Dec 02 18:53:05 crc kubenswrapper[4792]: I1202 18:53:05.581369 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb074f17-56fa-423c-958b-f37564b8433a-utilities\") pod \"certified-operators-pmltd\" (UID: \"bb074f17-56fa-423c-958b-f37564b8433a\") " pod="openshift-marketplace/certified-operators-pmltd" Dec 02 18:53:05 crc kubenswrapper[4792]: I1202 18:53:05.581449 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb074f17-56fa-423c-958b-f37564b8433a-catalog-content\") pod \"certified-operators-pmltd\" (UID: \"bb074f17-56fa-423c-958b-f37564b8433a\") " pod="openshift-marketplace/certified-operators-pmltd" Dec 02 18:53:05 crc kubenswrapper[4792]: I1202 18:53:05.581484 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5db4j\" (UniqueName: \"kubernetes.io/projected/bb074f17-56fa-423c-958b-f37564b8433a-kube-api-access-5db4j\") pod \"certified-operators-pmltd\" (UID: \"bb074f17-56fa-423c-958b-f37564b8433a\") " pod="openshift-marketplace/certified-operators-pmltd" Dec 02 18:53:05 crc kubenswrapper[4792]: I1202 18:53:05.581999 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb074f17-56fa-423c-958b-f37564b8433a-utilities\") pod \"certified-operators-pmltd\" (UID: \"bb074f17-56fa-423c-958b-f37564b8433a\") " pod="openshift-marketplace/certified-operators-pmltd" Dec 02 18:53:05 crc kubenswrapper[4792]: I1202 18:53:05.582091 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb074f17-56fa-423c-958b-f37564b8433a-catalog-content\") pod \"certified-operators-pmltd\" 
(UID: \"bb074f17-56fa-423c-958b-f37564b8433a\") " pod="openshift-marketplace/certified-operators-pmltd" Dec 02 18:53:05 crc kubenswrapper[4792]: I1202 18:53:05.627346 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5db4j\" (UniqueName: \"kubernetes.io/projected/bb074f17-56fa-423c-958b-f37564b8433a-kube-api-access-5db4j\") pod \"certified-operators-pmltd\" (UID: \"bb074f17-56fa-423c-958b-f37564b8433a\") " pod="openshift-marketplace/certified-operators-pmltd" Dec 02 18:53:05 crc kubenswrapper[4792]: I1202 18:53:05.694723 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pmltd" Dec 02 18:53:05 crc kubenswrapper[4792]: I1202 18:53:05.913834 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pmltd"] Dec 02 18:53:06 crc kubenswrapper[4792]: I1202 18:53:06.200676 4792 generic.go:334] "Generic (PLEG): container finished" podID="bb074f17-56fa-423c-958b-f37564b8433a" containerID="8d09e738e5658bdb3dfaf052c2cd6047752392d5a446ee797c4fcaf356396d75" exitCode=0 Dec 02 18:53:06 crc kubenswrapper[4792]: I1202 18:53:06.200713 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pmltd" event={"ID":"bb074f17-56fa-423c-958b-f37564b8433a","Type":"ContainerDied","Data":"8d09e738e5658bdb3dfaf052c2cd6047752392d5a446ee797c4fcaf356396d75"} Dec 02 18:53:06 crc kubenswrapper[4792]: I1202 18:53:06.200737 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pmltd" event={"ID":"bb074f17-56fa-423c-958b-f37564b8433a","Type":"ContainerStarted","Data":"60dcd35978956f2ceb493b2590975a513edc7e30899968cd53d6732ec2d68453"} Dec 02 18:53:06 crc kubenswrapper[4792]: I1202 18:53:06.201997 4792 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 18:53:08 crc kubenswrapper[4792]: I1202 18:53:08.216661 4792 generic.go:334] "Generic (PLEG): container finished" podID="bb074f17-56fa-423c-958b-f37564b8433a" containerID="62a6162ce852e5eb65d64eba3f18fbbdb2e6c3494a1109953ba94d57a1c3fb30" exitCode=0 Dec 02 18:53:08 crc kubenswrapper[4792]: I1202 18:53:08.216742 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pmltd" event={"ID":"bb074f17-56fa-423c-958b-f37564b8433a","Type":"ContainerDied","Data":"62a6162ce852e5eb65d64eba3f18fbbdb2e6c3494a1109953ba94d57a1c3fb30"} Dec 02 18:53:08 crc kubenswrapper[4792]: I1202 18:53:08.867360 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-7d46f7f9d9-8xxzl" Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.224856 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pmltd" event={"ID":"bb074f17-56fa-423c-958b-f37564b8433a","Type":"ContainerStarted","Data":"e5d650e44724b22a0426bf1c5dc7eb05507daed58a0f2a526cf179f7fcf1facb"} Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.254846 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-pmltd" podStartSLOduration=1.85531465 podStartE2EDuration="4.254811934s" podCreationTimestamp="2025-12-02 18:53:05 +0000 UTC" firstStartedPulling="2025-12-02 18:53:06.201793326 +0000 UTC m=+1016.974685654" lastFinishedPulling="2025-12-02 18:53:08.60129057 +0000 UTC m=+1019.374182938" observedRunningTime="2025-12-02 
Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.729690 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-st62r"]
Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.732472 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-st62r"
Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.734133 4792 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret"
Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.734584 4792 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-lctfn"
Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.736465 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup"
Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.743284 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-cjmwb"]
Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.744149 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-cjmwb"
Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.746197 4792 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert"
Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.754119 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-cjmwb"]
Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.816617 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-fmdjd"]
Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.817500 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-fmdjd"
Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.820780 4792 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist"
Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.821217 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2"
Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.821309 4792 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret"
Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.821841 4792 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-8fsfm"
Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.837068 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/57c06fe7-dccf-4f91-a9b1-21d345dd688e-frr-startup\") pod \"frr-k8s-st62r\" (UID: \"57c06fe7-dccf-4f91-a9b1-21d345dd688e\") " pod="metallb-system/frr-k8s-st62r"
Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.837276 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/57c06fe7-dccf-4f91-a9b1-21d345dd688e-reloader\") pod \"frr-k8s-st62r\" (UID: \"57c06fe7-dccf-4f91-a9b1-21d345dd688e\") " pod="metallb-system/frr-k8s-st62r"
Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.837356 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/57c06fe7-dccf-4f91-a9b1-21d345dd688e-metrics\") pod \"frr-k8s-st62r\" (UID: \"57c06fe7-dccf-4f91-a9b1-21d345dd688e\") " pod="metallb-system/frr-k8s-st62r"
Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.837424 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/57c06fe7-dccf-4f91-a9b1-21d345dd688e-frr-sockets\") pod \"frr-k8s-st62r\" (UID: \"57c06fe7-dccf-4f91-a9b1-21d345dd688e\") " pod="metallb-system/frr-k8s-st62r"
Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.837604 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4be13a2e-5592-4f44-ad74-31cf277205bf-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-cjmwb\" (UID: \"4be13a2e-5592-4f44-ad74-31cf277205bf\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-cjmwb"
Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.837677 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c725s\" (UniqueName: \"kubernetes.io/projected/57c06fe7-dccf-4f91-a9b1-21d345dd688e-kube-api-access-c725s\") pod \"frr-k8s-st62r\" (UID: \"57c06fe7-dccf-4f91-a9b1-21d345dd688e\") " pod="metallb-system/frr-k8s-st62r"
Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.837756 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-btwzr\" (UniqueName: \"kubernetes.io/projected/4be13a2e-5592-4f44-ad74-31cf277205bf-kube-api-access-btwzr\") pod \"frr-k8s-webhook-server-7fcb986d4-cjmwb\" (UID: \"4be13a2e-5592-4f44-ad74-31cf277205bf\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-cjmwb"
Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.837865 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/57c06fe7-dccf-4f91-a9b1-21d345dd688e-frr-conf\") pod \"frr-k8s-st62r\" (UID: \"57c06fe7-dccf-4f91-a9b1-21d345dd688e\") " pod="metallb-system/frr-k8s-st62r"
Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.837943 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/57c06fe7-dccf-4f91-a9b1-21d345dd688e-metrics-certs\") pod \"frr-k8s-st62r\" (UID: \"57c06fe7-dccf-4f91-a9b1-21d345dd688e\") " pod="metallb-system/frr-k8s-st62r"
Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.838659 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-f8648f98b-pjmcf"]
Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.856253 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-f8648f98b-pjmcf"
Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.860744 4792 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret"
Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.862828 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-pjmcf"]
Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.938914 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/57c06fe7-dccf-4f91-a9b1-21d345dd688e-frr-conf\") pod \"frr-k8s-st62r\" (UID: \"57c06fe7-dccf-4f91-a9b1-21d345dd688e\") " pod="metallb-system/frr-k8s-st62r"
Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.938963 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/38a2f9f3-6ce0-4790-8d68-59a8fc723caa-metallb-excludel2\") pod \"speaker-fmdjd\" (UID: \"38a2f9f3-6ce0-4790-8d68-59a8fc723caa\") " pod="metallb-system/speaker-fmdjd"
Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.938980 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/57c06fe7-dccf-4f91-a9b1-21d345dd688e-metrics-certs\") pod \"frr-k8s-st62r\" (UID: \"57c06fe7-dccf-4f91-a9b1-21d345dd688e\") " pod="metallb-system/frr-k8s-st62r"
Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.939015 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vz8cf\" (UniqueName: \"kubernetes.io/projected/cbe95ba3-c2a1-4755-8571-ddaba0aca9d6-kube-api-access-vz8cf\") pod \"controller-f8648f98b-pjmcf\" (UID: \"cbe95ba3-c2a1-4755-8571-ddaba0aca9d6\") " pod="metallb-system/controller-f8648f98b-pjmcf"
Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.939039 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/38a2f9f3-6ce0-4790-8d68-59a8fc723caa-memberlist\") pod \"speaker-fmdjd\" (UID: \"38a2f9f3-6ce0-4790-8d68-59a8fc723caa\") " pod="metallb-system/speaker-fmdjd"
Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.939062 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/57c06fe7-dccf-4f91-a9b1-21d345dd688e-frr-startup\") pod \"frr-k8s-st62r\" (UID: \"57c06fe7-dccf-4f91-a9b1-21d345dd688e\") " pod="metallb-system/frr-k8s-st62r"
Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.939075 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/57c06fe7-dccf-4f91-a9b1-21d345dd688e-reloader\") pod \"frr-k8s-st62r\" (UID: \"57c06fe7-dccf-4f91-a9b1-21d345dd688e\") " pod="metallb-system/frr-k8s-st62r"
Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.939093 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cbe95ba3-c2a1-4755-8571-ddaba0aca9d6-cert\") pod \"controller-f8648f98b-pjmcf\" (UID: \"cbe95ba3-c2a1-4755-8571-ddaba0aca9d6\") " pod="metallb-system/controller-f8648f98b-pjmcf"
Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.939109 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/57c06fe7-dccf-4f91-a9b1-21d345dd688e-metrics\") pod \"frr-k8s-st62r\" (UID: \"57c06fe7-dccf-4f91-a9b1-21d345dd688e\") " pod="metallb-system/frr-k8s-st62r"
Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.939125 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/57c06fe7-dccf-4f91-a9b1-21d345dd688e-frr-sockets\") pod \"frr-k8s-st62r\" (UID: \"57c06fe7-dccf-4f91-a9b1-21d345dd688e\") " pod="metallb-system/frr-k8s-st62r"
Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.939141 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4be13a2e-5592-4f44-ad74-31cf277205bf-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-cjmwb\" (UID: \"4be13a2e-5592-4f44-ad74-31cf277205bf\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-cjmwb"
Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.939155 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c725s\" (UniqueName: \"kubernetes.io/projected/57c06fe7-dccf-4f91-a9b1-21d345dd688e-kube-api-access-c725s\") pod \"frr-k8s-st62r\" (UID: \"57c06fe7-dccf-4f91-a9b1-21d345dd688e\") " pod="metallb-system/frr-k8s-st62r"
Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.939182 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4sh49\" (UniqueName: \"kubernetes.io/projected/38a2f9f3-6ce0-4790-8d68-59a8fc723caa-kube-api-access-4sh49\") pod \"speaker-fmdjd\" (UID: \"38a2f9f3-6ce0-4790-8d68-59a8fc723caa\") " pod="metallb-system/speaker-fmdjd"
Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.939211 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-btwzr\" (UniqueName: \"kubernetes.io/projected/4be13a2e-5592-4f44-ad74-31cf277205bf-kube-api-access-btwzr\") pod \"frr-k8s-webhook-server-7fcb986d4-cjmwb\" (UID: \"4be13a2e-5592-4f44-ad74-31cf277205bf\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-cjmwb"
Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.939233 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/38a2f9f3-6ce0-4790-8d68-59a8fc723caa-metrics-certs\") pod \"speaker-fmdjd\" (UID: \"38a2f9f3-6ce0-4790-8d68-59a8fc723caa\") " pod="metallb-system/speaker-fmdjd"
Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.939248 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cbe95ba3-c2a1-4755-8571-ddaba0aca9d6-metrics-certs\") pod \"controller-f8648f98b-pjmcf\" (UID: \"cbe95ba3-c2a1-4755-8571-ddaba0aca9d6\") " pod="metallb-system/controller-f8648f98b-pjmcf"
18:53:09.939248 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cbe95ba3-c2a1-4755-8571-ddaba0aca9d6-metrics-certs\") pod \"controller-f8648f98b-pjmcf\" (UID: \"cbe95ba3-c2a1-4755-8571-ddaba0aca9d6\") " pod="metallb-system/controller-f8648f98b-pjmcf" Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.939617 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/57c06fe7-dccf-4f91-a9b1-21d345dd688e-frr-conf\") pod \"frr-k8s-st62r\" (UID: \"57c06fe7-dccf-4f91-a9b1-21d345dd688e\") " pod="metallb-system/frr-k8s-st62r" Dec 02 18:53:09 crc kubenswrapper[4792]: E1202 18:53:09.939768 4792 secret.go:188] Couldn't get secret metallb-system/frr-k8s-webhook-server-cert: secret "frr-k8s-webhook-server-cert" not found Dec 02 18:53:09 crc kubenswrapper[4792]: E1202 18:53:09.939815 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4be13a2e-5592-4f44-ad74-31cf277205bf-cert podName:4be13a2e-5592-4f44-ad74-31cf277205bf nodeName:}" failed. No retries permitted until 2025-12-02 18:53:10.439799556 +0000 UTC m=+1021.212691884 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/4be13a2e-5592-4f44-ad74-31cf277205bf-cert") pod "frr-k8s-webhook-server-7fcb986d4-cjmwb" (UID: "4be13a2e-5592-4f44-ad74-31cf277205bf") : secret "frr-k8s-webhook-server-cert" not found Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.939960 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/57c06fe7-dccf-4f91-a9b1-21d345dd688e-reloader\") pod \"frr-k8s-st62r\" (UID: \"57c06fe7-dccf-4f91-a9b1-21d345dd688e\") " pod="metallb-system/frr-k8s-st62r" Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.940216 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/57c06fe7-dccf-4f91-a9b1-21d345dd688e-metrics\") pod \"frr-k8s-st62r\" (UID: \"57c06fe7-dccf-4f91-a9b1-21d345dd688e\") " pod="metallb-system/frr-k8s-st62r" Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.940407 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/57c06fe7-dccf-4f91-a9b1-21d345dd688e-frr-sockets\") pod \"frr-k8s-st62r\" (UID: \"57c06fe7-dccf-4f91-a9b1-21d345dd688e\") " pod="metallb-system/frr-k8s-st62r" Dec 02 18:53:09 crc kubenswrapper[4792]: E1202 18:53:09.940492 4792 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found Dec 02 18:53:09 crc kubenswrapper[4792]: E1202 18:53:09.940588 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/57c06fe7-dccf-4f91-a9b1-21d345dd688e-metrics-certs podName:57c06fe7-dccf-4f91-a9b1-21d345dd688e nodeName:}" failed. No retries permitted until 2025-12-02 18:53:10.440576136 +0000 UTC m=+1021.213468464 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/57c06fe7-dccf-4f91-a9b1-21d345dd688e-metrics-certs") pod "frr-k8s-st62r" (UID: "57c06fe7-dccf-4f91-a9b1-21d345dd688e") : secret "frr-k8s-certs-secret" not found Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.940587 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/57c06fe7-dccf-4f91-a9b1-21d345dd688e-frr-startup\") pod \"frr-k8s-st62r\" (UID: \"57c06fe7-dccf-4f91-a9b1-21d345dd688e\") " pod="metallb-system/frr-k8s-st62r" Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.968689 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c725s\" (UniqueName: \"kubernetes.io/projected/57c06fe7-dccf-4f91-a9b1-21d345dd688e-kube-api-access-c725s\") pod \"frr-k8s-st62r\" (UID: \"57c06fe7-dccf-4f91-a9b1-21d345dd688e\") " pod="metallb-system/frr-k8s-st62r" Dec 02 18:53:09 crc kubenswrapper[4792]: I1202 18:53:09.977867 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-btwzr\" (UniqueName: \"kubernetes.io/projected/4be13a2e-5592-4f44-ad74-31cf277205bf-kube-api-access-btwzr\") pod \"frr-k8s-webhook-server-7fcb986d4-cjmwb\" (UID: \"4be13a2e-5592-4f44-ad74-31cf277205bf\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-cjmwb" Dec 02 18:53:10 crc kubenswrapper[4792]: I1202 18:53:10.040317 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vz8cf\" (UniqueName: \"kubernetes.io/projected/cbe95ba3-c2a1-4755-8571-ddaba0aca9d6-kube-api-access-vz8cf\") pod \"controller-f8648f98b-pjmcf\" (UID: \"cbe95ba3-c2a1-4755-8571-ddaba0aca9d6\") " pod="metallb-system/controller-f8648f98b-pjmcf" Dec 02 18:53:10 crc kubenswrapper[4792]: I1202 18:53:10.040651 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/38a2f9f3-6ce0-4790-8d68-59a8fc723caa-memberlist\") pod \"speaker-fmdjd\" (UID: \"38a2f9f3-6ce0-4790-8d68-59a8fc723caa\") " pod="metallb-system/speaker-fmdjd" Dec 02 18:53:10 crc kubenswrapper[4792]: I1202 18:53:10.040753 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cbe95ba3-c2a1-4755-8571-ddaba0aca9d6-cert\") pod \"controller-f8648f98b-pjmcf\" (UID: \"cbe95ba3-c2a1-4755-8571-ddaba0aca9d6\") " pod="metallb-system/controller-f8648f98b-pjmcf" Dec 02 18:53:10 crc kubenswrapper[4792]: I1202 18:53:10.040893 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4sh49\" (UniqueName: \"kubernetes.io/projected/38a2f9f3-6ce0-4790-8d68-59a8fc723caa-kube-api-access-4sh49\") pod \"speaker-fmdjd\" (UID: \"38a2f9f3-6ce0-4790-8d68-59a8fc723caa\") " pod="metallb-system/speaker-fmdjd" Dec 02 18:53:10 crc kubenswrapper[4792]: I1202 18:53:10.040991 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/38a2f9f3-6ce0-4790-8d68-59a8fc723caa-metrics-certs\") pod \"speaker-fmdjd\" (UID: \"38a2f9f3-6ce0-4790-8d68-59a8fc723caa\") " pod="metallb-system/speaker-fmdjd" Dec 02 18:53:10 crc kubenswrapper[4792]: I1202 18:53:10.041087 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cbe95ba3-c2a1-4755-8571-ddaba0aca9d6-metrics-certs\") pod \"controller-f8648f98b-pjmcf\" (UID: 
\"cbe95ba3-c2a1-4755-8571-ddaba0aca9d6\") " pod="metallb-system/controller-f8648f98b-pjmcf" Dec 02 18:53:10 crc kubenswrapper[4792]: I1202 18:53:10.041211 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/38a2f9f3-6ce0-4790-8d68-59a8fc723caa-metallb-excludel2\") pod \"speaker-fmdjd\" (UID: \"38a2f9f3-6ce0-4790-8d68-59a8fc723caa\") " pod="metallb-system/speaker-fmdjd" Dec 02 18:53:10 crc kubenswrapper[4792]: E1202 18:53:10.041666 4792 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 02 18:53:10 crc kubenswrapper[4792]: E1202 18:53:10.041792 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/38a2f9f3-6ce0-4790-8d68-59a8fc723caa-memberlist podName:38a2f9f3-6ce0-4790-8d68-59a8fc723caa nodeName:}" failed. No retries permitted until 2025-12-02 18:53:10.541774765 +0000 UTC m=+1021.314667093 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/38a2f9f3-6ce0-4790-8d68-59a8fc723caa-memberlist") pod "speaker-fmdjd" (UID: "38a2f9f3-6ce0-4790-8d68-59a8fc723caa") : secret "metallb-memberlist" not found Dec 02 18:53:10 crc kubenswrapper[4792]: E1202 18:53:10.041792 4792 secret.go:188] Couldn't get secret metallb-system/controller-certs-secret: secret "controller-certs-secret" not found Dec 02 18:53:10 crc kubenswrapper[4792]: E1202 18:53:10.041986 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cbe95ba3-c2a1-4755-8571-ddaba0aca9d6-metrics-certs podName:cbe95ba3-c2a1-4755-8571-ddaba0aca9d6 nodeName:}" failed. No retries permitted until 2025-12-02 18:53:10.54197631 +0000 UTC m=+1021.314868628 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/cbe95ba3-c2a1-4755-8571-ddaba0aca9d6-metrics-certs") pod "controller-f8648f98b-pjmcf" (UID: "cbe95ba3-c2a1-4755-8571-ddaba0aca9d6") : secret "controller-certs-secret" not found Dec 02 18:53:10 crc kubenswrapper[4792]: I1202 18:53:10.043325 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/38a2f9f3-6ce0-4790-8d68-59a8fc723caa-metallb-excludel2\") pod \"speaker-fmdjd\" (UID: \"38a2f9f3-6ce0-4790-8d68-59a8fc723caa\") " pod="metallb-system/speaker-fmdjd" Dec 02 18:53:10 crc kubenswrapper[4792]: I1202 18:53:10.054795 4792 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 02 18:53:10 crc kubenswrapper[4792]: I1202 18:53:10.054904 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/38a2f9f3-6ce0-4790-8d68-59a8fc723caa-metrics-certs\") pod \"speaker-fmdjd\" (UID: \"38a2f9f3-6ce0-4790-8d68-59a8fc723caa\") " pod="metallb-system/speaker-fmdjd" Dec 02 18:53:10 crc kubenswrapper[4792]: I1202 18:53:10.059981 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4sh49\" (UniqueName: \"kubernetes.io/projected/38a2f9f3-6ce0-4790-8d68-59a8fc723caa-kube-api-access-4sh49\") pod \"speaker-fmdjd\" (UID: \"38a2f9f3-6ce0-4790-8d68-59a8fc723caa\") " pod="metallb-system/speaker-fmdjd" Dec 02 18:53:10 crc kubenswrapper[4792]: I1202 18:53:10.062126 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vz8cf\" (UniqueName: \"kubernetes.io/projected/cbe95ba3-c2a1-4755-8571-ddaba0aca9d6-kube-api-access-vz8cf\") pod \"controller-f8648f98b-pjmcf\" (UID: \"cbe95ba3-c2a1-4755-8571-ddaba0aca9d6\") " pod="metallb-system/controller-f8648f98b-pjmcf" Dec 02 18:53:10 crc kubenswrapper[4792]: I1202 18:53:10.065488 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cbe95ba3-c2a1-4755-8571-ddaba0aca9d6-cert\") pod \"controller-f8648f98b-pjmcf\" (UID: \"cbe95ba3-c2a1-4755-8571-ddaba0aca9d6\") " pod="metallb-system/controller-f8648f98b-pjmcf" Dec 02 18:53:10 crc kubenswrapper[4792]: I1202 18:53:10.446643 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/57c06fe7-dccf-4f91-a9b1-21d345dd688e-metrics-certs\") pod \"frr-k8s-st62r\" (UID: \"57c06fe7-dccf-4f91-a9b1-21d345dd688e\") " pod="metallb-system/frr-k8s-st62r" Dec 02 18:53:10 crc kubenswrapper[4792]: I1202 18:53:10.448301 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4be13a2e-5592-4f44-ad74-31cf277205bf-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-cjmwb\" (UID: \"4be13a2e-5592-4f44-ad74-31cf277205bf\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-cjmwb" Dec 02 18:53:10 crc kubenswrapper[4792]: I1202 18:53:10.449435 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/57c06fe7-dccf-4f91-a9b1-21d345dd688e-metrics-certs\") pod \"frr-k8s-st62r\" (UID: \"57c06fe7-dccf-4f91-a9b1-21d345dd688e\") " pod="metallb-system/frr-k8s-st62r" Dec 02 18:53:10 crc kubenswrapper[4792]: I1202 18:53:10.453588 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: 
\"kubernetes.io/secret/4be13a2e-5592-4f44-ad74-31cf277205bf-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-cjmwb\" (UID: \"4be13a2e-5592-4f44-ad74-31cf277205bf\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-cjmwb" Dec 02 18:53:10 crc kubenswrapper[4792]: I1202 18:53:10.549783 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/38a2f9f3-6ce0-4790-8d68-59a8fc723caa-memberlist\") pod \"speaker-fmdjd\" (UID: \"38a2f9f3-6ce0-4790-8d68-59a8fc723caa\") " pod="metallb-system/speaker-fmdjd" Dec 02 18:53:10 crc kubenswrapper[4792]: I1202 18:53:10.549928 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cbe95ba3-c2a1-4755-8571-ddaba0aca9d6-metrics-certs\") pod \"controller-f8648f98b-pjmcf\" (UID: \"cbe95ba3-c2a1-4755-8571-ddaba0aca9d6\") " pod="metallb-system/controller-f8648f98b-pjmcf" Dec 02 18:53:10 crc kubenswrapper[4792]: E1202 18:53:10.550312 4792 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 02 18:53:10 crc kubenswrapper[4792]: E1202 18:53:10.550518 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/38a2f9f3-6ce0-4790-8d68-59a8fc723caa-memberlist podName:38a2f9f3-6ce0-4790-8d68-59a8fc723caa nodeName:}" failed. No retries permitted until 2025-12-02 18:53:11.550485338 +0000 UTC m=+1022.323377706 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/38a2f9f3-6ce0-4790-8d68-59a8fc723caa-memberlist") pod "speaker-fmdjd" (UID: "38a2f9f3-6ce0-4790-8d68-59a8fc723caa") : secret "metallb-memberlist" not found Dec 02 18:53:10 crc kubenswrapper[4792]: I1202 18:53:10.554093 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cbe95ba3-c2a1-4755-8571-ddaba0aca9d6-metrics-certs\") pod \"controller-f8648f98b-pjmcf\" (UID: \"cbe95ba3-c2a1-4755-8571-ddaba0aca9d6\") " pod="metallb-system/controller-f8648f98b-pjmcf" Dec 02 18:53:10 crc kubenswrapper[4792]: I1202 18:53:10.653063 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-st62r" Dec 02 18:53:10 crc kubenswrapper[4792]: I1202 18:53:10.667764 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-cjmwb" Dec 02 18:53:10 crc kubenswrapper[4792]: I1202 18:53:10.815096 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-pjmcf" Dec 02 18:53:11 crc kubenswrapper[4792]: W1202 18:53:11.119990 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcbe95ba3_c2a1_4755_8571_ddaba0aca9d6.slice/crio-2ae9fad0f88def9815bbf270369d1b549d1a4bccba553de5bb6e5f0b887974bb WatchSource:0}: Error finding container 2ae9fad0f88def9815bbf270369d1b549d1a4bccba553de5bb6e5f0b887974bb: Status 404 returned error can't find the container with id 2ae9fad0f88def9815bbf270369d1b549d1a4bccba553de5bb6e5f0b887974bb Dec 02 18:53:11 crc kubenswrapper[4792]: I1202 18:53:11.120856 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-pjmcf"] Dec 02 18:53:11 crc kubenswrapper[4792]: I1202 18:53:11.134681 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-cjmwb"] Dec 02 18:53:11 crc kubenswrapper[4792]: I1202 18:53:11.237031 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-pjmcf" event={"ID":"cbe95ba3-c2a1-4755-8571-ddaba0aca9d6","Type":"ContainerStarted","Data":"2ae9fad0f88def9815bbf270369d1b549d1a4bccba553de5bb6e5f0b887974bb"} Dec 02 18:53:11 crc kubenswrapper[4792]: I1202 18:53:11.238453 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-st62r" event={"ID":"57c06fe7-dccf-4f91-a9b1-21d345dd688e","Type":"ContainerStarted","Data":"04d9642a30cef36656e4ef6418ca5a27ccc1d458daa1aa2b68fbe069e4604fc2"} Dec 02 18:53:11 crc kubenswrapper[4792]: I1202 18:53:11.239901 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-cjmwb" event={"ID":"4be13a2e-5592-4f44-ad74-31cf277205bf","Type":"ContainerStarted","Data":"b9109d617212f67db28d251945f5a34ca640b13d9775ffc279b08cdc866253ee"} Dec 02 18:53:11 crc kubenswrapper[4792]: I1202 18:53:11.571209 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/38a2f9f3-6ce0-4790-8d68-59a8fc723caa-memberlist\") pod \"speaker-fmdjd\" (UID: \"38a2f9f3-6ce0-4790-8d68-59a8fc723caa\") " pod="metallb-system/speaker-fmdjd" Dec 02 18:53:11 crc kubenswrapper[4792]: I1202 18:53:11.578236 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/38a2f9f3-6ce0-4790-8d68-59a8fc723caa-memberlist\") pod \"speaker-fmdjd\" (UID: \"38a2f9f3-6ce0-4790-8d68-59a8fc723caa\") " pod="metallb-system/speaker-fmdjd" Dec 02 18:53:11 crc kubenswrapper[4792]: I1202 18:53:11.630186 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-fmdjd" Dec 02 18:53:12 crc kubenswrapper[4792]: I1202 18:53:12.254867 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-fmdjd" event={"ID":"38a2f9f3-6ce0-4790-8d68-59a8fc723caa","Type":"ContainerStarted","Data":"5a9edcb9a29789a9a6277ace4ad2dea91aa3c49c3aaecad77941a442af1575d6"} Dec 02 18:53:12 crc kubenswrapper[4792]: I1202 18:53:12.254918 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-fmdjd" event={"ID":"38a2f9f3-6ce0-4790-8d68-59a8fc723caa","Type":"ContainerStarted","Data":"a2b4f9883f50f6a93619ef45d77a2212e4b6efd60795bb774cf24a225f19d7fd"} Dec 02 18:53:12 crc kubenswrapper[4792]: I1202 18:53:12.254932 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-fmdjd" event={"ID":"38a2f9f3-6ce0-4790-8d68-59a8fc723caa","Type":"ContainerStarted","Data":"ce0b77c2ed5d7d27d01174f395c3793ee4cd118441047b6ba4fdfb58f759c661"} Dec 02 18:53:12 crc kubenswrapper[4792]: I1202 18:53:12.255126 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-fmdjd" Dec 02 18:53:12 crc kubenswrapper[4792]: I1202 18:53:12.258347 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-pjmcf" event={"ID":"cbe95ba3-c2a1-4755-8571-ddaba0aca9d6","Type":"ContainerStarted","Data":"13fcea1a448ee4b1291c0e69ab2bdcae5664b5600173c9e6ea705a712e380017"} Dec 02 18:53:12 crc kubenswrapper[4792]: I1202 18:53:12.258380 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-pjmcf" event={"ID":"cbe95ba3-c2a1-4755-8571-ddaba0aca9d6","Type":"ContainerStarted","Data":"b6295c984e5112f00d2a4f0f4c3a89726d7e6a6a61f70f0ec719ef7baf0ee720"} Dec 02 18:53:12 crc kubenswrapper[4792]: I1202 18:53:12.258481 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-f8648f98b-pjmcf" Dec 02 18:53:12 crc kubenswrapper[4792]: I1202 18:53:12.271146 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-fmdjd" podStartSLOduration=3.271113149 podStartE2EDuration="3.271113149s" podCreationTimestamp="2025-12-02 18:53:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:53:12.268903251 +0000 UTC m=+1023.041795589" watchObservedRunningTime="2025-12-02 18:53:12.271113149 +0000 UTC m=+1023.044005477" Dec 02 18:53:12 crc kubenswrapper[4792]: I1202 18:53:12.288389 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-f8648f98b-pjmcf" podStartSLOduration=3.288372677 podStartE2EDuration="3.288372677s" podCreationTimestamp="2025-12-02 18:53:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:53:12.286331944 +0000 UTC m=+1023.059224272" watchObservedRunningTime="2025-12-02 18:53:12.288372677 +0000 UTC m=+1023.061265005" Dec 02 18:53:14 crc kubenswrapper[4792]: I1202 18:53:14.393550 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-85cl4"] Dec 02 18:53:14 crc kubenswrapper[4792]: I1202 18:53:14.395777 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-85cl4" Dec 02 18:53:14 crc kubenswrapper[4792]: I1202 18:53:14.406555 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-85cl4"] Dec 02 18:53:14 crc kubenswrapper[4792]: I1202 18:53:14.510893 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-krl4d\" (UniqueName: \"kubernetes.io/projected/600572fb-c68d-4a1c-86ff-1f542564daeb-kube-api-access-krl4d\") pod \"redhat-marketplace-85cl4\" (UID: \"600572fb-c68d-4a1c-86ff-1f542564daeb\") " pod="openshift-marketplace/redhat-marketplace-85cl4" Dec 02 18:53:14 crc kubenswrapper[4792]: I1202 18:53:14.511257 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/600572fb-c68d-4a1c-86ff-1f542564daeb-catalog-content\") pod \"redhat-marketplace-85cl4\" (UID: \"600572fb-c68d-4a1c-86ff-1f542564daeb\") " pod="openshift-marketplace/redhat-marketplace-85cl4" Dec 02 18:53:14 crc kubenswrapper[4792]: I1202 18:53:14.511290 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/600572fb-c68d-4a1c-86ff-1f542564daeb-utilities\") pod \"redhat-marketplace-85cl4\" (UID: \"600572fb-c68d-4a1c-86ff-1f542564daeb\") " pod="openshift-marketplace/redhat-marketplace-85cl4" Dec 02 18:53:14 crc kubenswrapper[4792]: I1202 18:53:14.613097 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-krl4d\" (UniqueName: \"kubernetes.io/projected/600572fb-c68d-4a1c-86ff-1f542564daeb-kube-api-access-krl4d\") pod \"redhat-marketplace-85cl4\" (UID: \"600572fb-c68d-4a1c-86ff-1f542564daeb\") " pod="openshift-marketplace/redhat-marketplace-85cl4" Dec 02 18:53:14 crc kubenswrapper[4792]: I1202 18:53:14.613160 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/600572fb-c68d-4a1c-86ff-1f542564daeb-catalog-content\") pod \"redhat-marketplace-85cl4\" (UID: \"600572fb-c68d-4a1c-86ff-1f542564daeb\") " pod="openshift-marketplace/redhat-marketplace-85cl4" Dec 02 18:53:14 crc kubenswrapper[4792]: I1202 18:53:14.613199 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/600572fb-c68d-4a1c-86ff-1f542564daeb-utilities\") pod \"redhat-marketplace-85cl4\" (UID: \"600572fb-c68d-4a1c-86ff-1f542564daeb\") " pod="openshift-marketplace/redhat-marketplace-85cl4" Dec 02 18:53:14 crc kubenswrapper[4792]: I1202 18:53:14.613975 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/600572fb-c68d-4a1c-86ff-1f542564daeb-catalog-content\") pod \"redhat-marketplace-85cl4\" (UID: \"600572fb-c68d-4a1c-86ff-1f542564daeb\") " pod="openshift-marketplace/redhat-marketplace-85cl4" Dec 02 18:53:14 crc kubenswrapper[4792]: I1202 18:53:14.614127 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/600572fb-c68d-4a1c-86ff-1f542564daeb-utilities\") pod \"redhat-marketplace-85cl4\" (UID: \"600572fb-c68d-4a1c-86ff-1f542564daeb\") " pod="openshift-marketplace/redhat-marketplace-85cl4" Dec 02 18:53:14 crc kubenswrapper[4792]: I1202 18:53:14.636338 4792 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-krl4d\" (UniqueName: \"kubernetes.io/projected/600572fb-c68d-4a1c-86ff-1f542564daeb-kube-api-access-krl4d\") pod \"redhat-marketplace-85cl4\" (UID: \"600572fb-c68d-4a1c-86ff-1f542564daeb\") " pod="openshift-marketplace/redhat-marketplace-85cl4" Dec 02 18:53:14 crc kubenswrapper[4792]: I1202 18:53:14.713479 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-85cl4" Dec 02 18:53:15 crc kubenswrapper[4792]: I1202 18:53:15.131817 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-85cl4"] Dec 02 18:53:15 crc kubenswrapper[4792]: I1202 18:53:15.284573 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-85cl4" event={"ID":"600572fb-c68d-4a1c-86ff-1f542564daeb","Type":"ContainerStarted","Data":"2a98fe62c6a8d946cf168cc8fdeb559b11e213f74d5df99935345f49273a1dc6"} Dec 02 18:53:15 crc kubenswrapper[4792]: I1202 18:53:15.695564 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-pmltd" Dec 02 18:53:15 crc kubenswrapper[4792]: I1202 18:53:15.695626 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-pmltd" Dec 02 18:53:15 crc kubenswrapper[4792]: I1202 18:53:15.741313 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-pmltd" Dec 02 18:53:16 crc kubenswrapper[4792]: I1202 18:53:16.297762 4792 generic.go:334] "Generic (PLEG): container finished" podID="600572fb-c68d-4a1c-86ff-1f542564daeb" containerID="e831f5d10a8241cd65f9a4c0f2c4fe69c1fb5d3c717fa92674c29f3d143b0daf" exitCode=0 Dec 02 18:53:16 crc kubenswrapper[4792]: I1202 18:53:16.297813 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-85cl4" event={"ID":"600572fb-c68d-4a1c-86ff-1f542564daeb","Type":"ContainerDied","Data":"e831f5d10a8241cd65f9a4c0f2c4fe69c1fb5d3c717fa92674c29f3d143b0daf"} Dec 02 18:53:16 crc kubenswrapper[4792]: I1202 18:53:16.349961 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-pmltd" Dec 02 18:53:18 crc kubenswrapper[4792]: I1202 18:53:18.171241 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-pmltd"] Dec 02 18:53:18 crc kubenswrapper[4792]: I1202 18:53:18.311186 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-pmltd" podUID="bb074f17-56fa-423c-958b-f37564b8433a" containerName="registry-server" containerID="cri-o://e5d650e44724b22a0426bf1c5dc7eb05507daed58a0f2a526cf179f7fcf1facb" gracePeriod=2 Dec 02 18:53:18 crc kubenswrapper[4792]: I1202 18:53:18.824701 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-pmltd" Dec 02 18:53:18 crc kubenswrapper[4792]: I1202 18:53:18.894192 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb074f17-56fa-423c-958b-f37564b8433a-catalog-content\") pod \"bb074f17-56fa-423c-958b-f37564b8433a\" (UID: \"bb074f17-56fa-423c-958b-f37564b8433a\") " Dec 02 18:53:18 crc kubenswrapper[4792]: I1202 18:53:18.894258 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb074f17-56fa-423c-958b-f37564b8433a-utilities\") pod \"bb074f17-56fa-423c-958b-f37564b8433a\" (UID: \"bb074f17-56fa-423c-958b-f37564b8433a\") " Dec 02 18:53:18 crc kubenswrapper[4792]: I1202 18:53:18.894348 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5db4j\" (UniqueName: \"kubernetes.io/projected/bb074f17-56fa-423c-958b-f37564b8433a-kube-api-access-5db4j\") pod \"bb074f17-56fa-423c-958b-f37564b8433a\" (UID: \"bb074f17-56fa-423c-958b-f37564b8433a\") " Dec 02 18:53:18 crc kubenswrapper[4792]: I1202 18:53:18.895416 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bb074f17-56fa-423c-958b-f37564b8433a-utilities" (OuterVolumeSpecName: "utilities") pod "bb074f17-56fa-423c-958b-f37564b8433a" (UID: "bb074f17-56fa-423c-958b-f37564b8433a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:53:18 crc kubenswrapper[4792]: I1202 18:53:18.935755 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb074f17-56fa-423c-958b-f37564b8433a-kube-api-access-5db4j" (OuterVolumeSpecName: "kube-api-access-5db4j") pod "bb074f17-56fa-423c-958b-f37564b8433a" (UID: "bb074f17-56fa-423c-958b-f37564b8433a"). InnerVolumeSpecName "kube-api-access-5db4j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:53:18 crc kubenswrapper[4792]: I1202 18:53:18.942454 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bb074f17-56fa-423c-958b-f37564b8433a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bb074f17-56fa-423c-958b-f37564b8433a" (UID: "bb074f17-56fa-423c-958b-f37564b8433a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:53:18 crc kubenswrapper[4792]: I1202 18:53:18.995907 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5db4j\" (UniqueName: \"kubernetes.io/projected/bb074f17-56fa-423c-958b-f37564b8433a-kube-api-access-5db4j\") on node \"crc\" DevicePath \"\"" Dec 02 18:53:18 crc kubenswrapper[4792]: I1202 18:53:18.995941 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb074f17-56fa-423c-958b-f37564b8433a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 18:53:18 crc kubenswrapper[4792]: I1202 18:53:18.995950 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb074f17-56fa-423c-958b-f37564b8433a-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 18:53:19 crc kubenswrapper[4792]: I1202 18:53:19.324009 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-cjmwb" event={"ID":"4be13a2e-5592-4f44-ad74-31cf277205bf","Type":"ContainerStarted","Data":"0c37a24593072d52dfd7ab49127031b43134f9e34da11d1569c07ca85b89b0d5"} Dec 02 18:53:19 crc kubenswrapper[4792]: I1202 18:53:19.324129 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-cjmwb" Dec 02 18:53:19 crc kubenswrapper[4792]: I1202 18:53:19.330415 4792 generic.go:334] "Generic (PLEG): container finished" podID="bb074f17-56fa-423c-958b-f37564b8433a" containerID="e5d650e44724b22a0426bf1c5dc7eb05507daed58a0f2a526cf179f7fcf1facb" exitCode=0 Dec 02 18:53:19 crc kubenswrapper[4792]: I1202 18:53:19.330485 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pmltd" event={"ID":"bb074f17-56fa-423c-958b-f37564b8433a","Type":"ContainerDied","Data":"e5d650e44724b22a0426bf1c5dc7eb05507daed58a0f2a526cf179f7fcf1facb"} Dec 02 18:53:19 crc kubenswrapper[4792]: I1202 18:53:19.330573 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pmltd" event={"ID":"bb074f17-56fa-423c-958b-f37564b8433a","Type":"ContainerDied","Data":"60dcd35978956f2ceb493b2590975a513edc7e30899968cd53d6732ec2d68453"} Dec 02 18:53:19 crc kubenswrapper[4792]: I1202 18:53:19.330599 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-pmltd" Dec 02 18:53:19 crc kubenswrapper[4792]: I1202 18:53:19.330608 4792 scope.go:117] "RemoveContainer" containerID="e5d650e44724b22a0426bf1c5dc7eb05507daed58a0f2a526cf179f7fcf1facb" Dec 02 18:53:19 crc kubenswrapper[4792]: I1202 18:53:19.336554 4792 generic.go:334] "Generic (PLEG): container finished" podID="57c06fe7-dccf-4f91-a9b1-21d345dd688e" containerID="5b5123c107acaa839c60e450be3ce45d5a45b93acafe1583670a47c503f3d013" exitCode=0 Dec 02 18:53:19 crc kubenswrapper[4792]: I1202 18:53:19.336605 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-st62r" event={"ID":"57c06fe7-dccf-4f91-a9b1-21d345dd688e","Type":"ContainerDied","Data":"5b5123c107acaa839c60e450be3ce45d5a45b93acafe1583670a47c503f3d013"} Dec 02 18:53:19 crc kubenswrapper[4792]: I1202 18:53:19.369128 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-cjmwb" podStartSLOduration=2.888043609 podStartE2EDuration="10.36910794s" podCreationTimestamp="2025-12-02 18:53:09 +0000 UTC" firstStartedPulling="2025-12-02 18:53:11.150696958 +0000 UTC m=+1021.923589296" lastFinishedPulling="2025-12-02 18:53:18.631761299 +0000 UTC m=+1029.404653627" observedRunningTime="2025-12-02 18:53:19.353216417 +0000 UTC m=+1030.126108795" watchObservedRunningTime="2025-12-02 18:53:19.36910794 +0000 UTC m=+1030.142000278" Dec 02 18:53:19 crc kubenswrapper[4792]: I1202 18:53:19.373566 4792 scope.go:117] "RemoveContainer" containerID="62a6162ce852e5eb65d64eba3f18fbbdb2e6c3494a1109953ba94d57a1c3fb30" Dec 02 18:53:19 crc kubenswrapper[4792]: I1202 18:53:19.426543 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-pmltd"] Dec 02 18:53:19 crc kubenswrapper[4792]: I1202 18:53:19.432591 4792 scope.go:117] "RemoveContainer" containerID="8d09e738e5658bdb3dfaf052c2cd6047752392d5a446ee797c4fcaf356396d75" Dec 02 18:53:19 crc kubenswrapper[4792]: I1202 18:53:19.438316 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-pmltd"] Dec 02 18:53:19 crc kubenswrapper[4792]: I1202 18:53:19.464637 4792 scope.go:117] "RemoveContainer" containerID="e5d650e44724b22a0426bf1c5dc7eb05507daed58a0f2a526cf179f7fcf1facb" Dec 02 18:53:19 crc kubenswrapper[4792]: E1202 18:53:19.465076 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e5d650e44724b22a0426bf1c5dc7eb05507daed58a0f2a526cf179f7fcf1facb\": container with ID starting with e5d650e44724b22a0426bf1c5dc7eb05507daed58a0f2a526cf179f7fcf1facb not found: ID does not exist" containerID="e5d650e44724b22a0426bf1c5dc7eb05507daed58a0f2a526cf179f7fcf1facb" Dec 02 18:53:19 crc kubenswrapper[4792]: I1202 18:53:19.465113 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5d650e44724b22a0426bf1c5dc7eb05507daed58a0f2a526cf179f7fcf1facb"} err="failed to get container status \"e5d650e44724b22a0426bf1c5dc7eb05507daed58a0f2a526cf179f7fcf1facb\": rpc error: code = NotFound desc = could not find container \"e5d650e44724b22a0426bf1c5dc7eb05507daed58a0f2a526cf179f7fcf1facb\": container with ID starting with e5d650e44724b22a0426bf1c5dc7eb05507daed58a0f2a526cf179f7fcf1facb not found: ID does not exist" Dec 02 18:53:19 crc kubenswrapper[4792]: I1202 18:53:19.465140 4792 scope.go:117] "RemoveContainer" 
containerID="62a6162ce852e5eb65d64eba3f18fbbdb2e6c3494a1109953ba94d57a1c3fb30" Dec 02 18:53:19 crc kubenswrapper[4792]: E1202 18:53:19.466065 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"62a6162ce852e5eb65d64eba3f18fbbdb2e6c3494a1109953ba94d57a1c3fb30\": container with ID starting with 62a6162ce852e5eb65d64eba3f18fbbdb2e6c3494a1109953ba94d57a1c3fb30 not found: ID does not exist" containerID="62a6162ce852e5eb65d64eba3f18fbbdb2e6c3494a1109953ba94d57a1c3fb30" Dec 02 18:53:19 crc kubenswrapper[4792]: I1202 18:53:19.466092 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62a6162ce852e5eb65d64eba3f18fbbdb2e6c3494a1109953ba94d57a1c3fb30"} err="failed to get container status \"62a6162ce852e5eb65d64eba3f18fbbdb2e6c3494a1109953ba94d57a1c3fb30\": rpc error: code = NotFound desc = could not find container \"62a6162ce852e5eb65d64eba3f18fbbdb2e6c3494a1109953ba94d57a1c3fb30\": container with ID starting with 62a6162ce852e5eb65d64eba3f18fbbdb2e6c3494a1109953ba94d57a1c3fb30 not found: ID does not exist" Dec 02 18:53:19 crc kubenswrapper[4792]: I1202 18:53:19.466112 4792 scope.go:117] "RemoveContainer" containerID="8d09e738e5658bdb3dfaf052c2cd6047752392d5a446ee797c4fcaf356396d75" Dec 02 18:53:19 crc kubenswrapper[4792]: E1202 18:53:19.466613 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d09e738e5658bdb3dfaf052c2cd6047752392d5a446ee797c4fcaf356396d75\": container with ID starting with 8d09e738e5658bdb3dfaf052c2cd6047752392d5a446ee797c4fcaf356396d75 not found: ID does not exist" containerID="8d09e738e5658bdb3dfaf052c2cd6047752392d5a446ee797c4fcaf356396d75" Dec 02 18:53:19 crc kubenswrapper[4792]: I1202 18:53:19.466678 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d09e738e5658bdb3dfaf052c2cd6047752392d5a446ee797c4fcaf356396d75"} err="failed to get container status \"8d09e738e5658bdb3dfaf052c2cd6047752392d5a446ee797c4fcaf356396d75\": rpc error: code = NotFound desc = could not find container \"8d09e738e5658bdb3dfaf052c2cd6047752392d5a446ee797c4fcaf356396d75\": container with ID starting with 8d09e738e5658bdb3dfaf052c2cd6047752392d5a446ee797c4fcaf356396d75 not found: ID does not exist" Dec 02 18:53:19 crc kubenswrapper[4792]: I1202 18:53:19.548067 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bb074f17-56fa-423c-958b-f37564b8433a" path="/var/lib/kubelet/pods/bb074f17-56fa-423c-958b-f37564b8433a/volumes" Dec 02 18:53:20 crc kubenswrapper[4792]: I1202 18:53:20.347347 4792 generic.go:334] "Generic (PLEG): container finished" podID="600572fb-c68d-4a1c-86ff-1f542564daeb" containerID="628305a67dafe47e5959c1191e8d9f6ec8883665e8211e2573ea385a7121671f" exitCode=0 Dec 02 18:53:20 crc kubenswrapper[4792]: I1202 18:53:20.347451 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-85cl4" event={"ID":"600572fb-c68d-4a1c-86ff-1f542564daeb","Type":"ContainerDied","Data":"628305a67dafe47e5959c1191e8d9f6ec8883665e8211e2573ea385a7121671f"} Dec 02 18:53:20 crc kubenswrapper[4792]: I1202 18:53:20.351413 4792 generic.go:334] "Generic (PLEG): container finished" podID="57c06fe7-dccf-4f91-a9b1-21d345dd688e" containerID="23c06e12285a836c25b2dbf395152a2e1984f3c9328f757532b62d67557443b1" exitCode=0 Dec 02 18:53:20 crc kubenswrapper[4792]: I1202 18:53:20.351548 4792 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="metallb-system/frr-k8s-st62r" event={"ID":"57c06fe7-dccf-4f91-a9b1-21d345dd688e","Type":"ContainerDied","Data":"23c06e12285a836c25b2dbf395152a2e1984f3c9328f757532b62d67557443b1"} Dec 02 18:53:21 crc kubenswrapper[4792]: I1202 18:53:21.360768 4792 generic.go:334] "Generic (PLEG): container finished" podID="57c06fe7-dccf-4f91-a9b1-21d345dd688e" containerID="dd8db0b468fde1b66f92b93a8ef584ddddf81a08c5292854c09c667545d6b68a" exitCode=0 Dec 02 18:53:21 crc kubenswrapper[4792]: I1202 18:53:21.360817 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-st62r" event={"ID":"57c06fe7-dccf-4f91-a9b1-21d345dd688e","Type":"ContainerDied","Data":"dd8db0b468fde1b66f92b93a8ef584ddddf81a08c5292854c09c667545d6b68a"} Dec 02 18:53:21 crc kubenswrapper[4792]: I1202 18:53:21.636735 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-fmdjd" Dec 02 18:53:22 crc kubenswrapper[4792]: I1202 18:53:22.372624 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-85cl4" event={"ID":"600572fb-c68d-4a1c-86ff-1f542564daeb","Type":"ContainerStarted","Data":"fa2d29f367c2442c760c0f337f77575e9f89b16371f9b9cc2e9ae7da76640d33"} Dec 02 18:53:22 crc kubenswrapper[4792]: I1202 18:53:22.377556 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-st62r" event={"ID":"57c06fe7-dccf-4f91-a9b1-21d345dd688e","Type":"ContainerStarted","Data":"da997da9e29f597a9e24d857d1a25b2fe9b0a5f21cc8d6080d69361b1ef209d0"} Dec 02 18:53:22 crc kubenswrapper[4792]: I1202 18:53:22.377605 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-st62r" event={"ID":"57c06fe7-dccf-4f91-a9b1-21d345dd688e","Type":"ContainerStarted","Data":"cbba4f26886c784fe4eef212ead91a221c1b4e9f1f69bd0cb6e4658fa8d2bc95"} Dec 02 18:53:22 crc kubenswrapper[4792]: I1202 18:53:22.377625 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-st62r" event={"ID":"57c06fe7-dccf-4f91-a9b1-21d345dd688e","Type":"ContainerStarted","Data":"5f9d780f82e27052b079f988c6156f9b0ca58b6e99c471c04d45802c8e3e834d"} Dec 02 18:53:22 crc kubenswrapper[4792]: I1202 18:53:22.377641 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-st62r" event={"ID":"57c06fe7-dccf-4f91-a9b1-21d345dd688e","Type":"ContainerStarted","Data":"a1bb182ddae28179071aa498da229ce64e0a6e63d7795c344ea997ed4d535011"} Dec 02 18:53:22 crc kubenswrapper[4792]: I1202 18:53:22.377659 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-st62r" event={"ID":"57c06fe7-dccf-4f91-a9b1-21d345dd688e","Type":"ContainerStarted","Data":"75dc7adacbf46d58b869abccf88664f52a7ece02e4e9ed5006b430e58f378111"} Dec 02 18:53:22 crc kubenswrapper[4792]: I1202 18:53:22.394288 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-85cl4" podStartSLOduration=5.976461626 podStartE2EDuration="8.394272006s" podCreationTimestamp="2025-12-02 18:53:14 +0000 UTC" firstStartedPulling="2025-12-02 18:53:18.494900924 +0000 UTC m=+1029.267793292" lastFinishedPulling="2025-12-02 18:53:20.912711344 +0000 UTC m=+1031.685603672" observedRunningTime="2025-12-02 18:53:22.393196538 +0000 UTC m=+1033.166088866" watchObservedRunningTime="2025-12-02 18:53:22.394272006 +0000 UTC m=+1033.167164334" Dec 02 18:53:23 crc kubenswrapper[4792]: I1202 18:53:23.394845 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="metallb-system/frr-k8s-st62r" event={"ID":"57c06fe7-dccf-4f91-a9b1-21d345dd688e","Type":"ContainerStarted","Data":"d1b46bbcb0f57dc97deedc6a0b750c25e138eb16ca7eb5df6eb62fedc9dbf9c3"} Dec 02 18:53:23 crc kubenswrapper[4792]: I1202 18:53:23.430797 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-st62r" podStartSLOduration=6.66725792 podStartE2EDuration="14.430769437s" podCreationTimestamp="2025-12-02 18:53:09 +0000 UTC" firstStartedPulling="2025-12-02 18:53:10.835338347 +0000 UTC m=+1021.608230685" lastFinishedPulling="2025-12-02 18:53:18.598849864 +0000 UTC m=+1029.371742202" observedRunningTime="2025-12-02 18:53:23.428868967 +0000 UTC m=+1034.201761375" watchObservedRunningTime="2025-12-02 18:53:23.430769437 +0000 UTC m=+1034.203661795" Dec 02 18:53:24 crc kubenswrapper[4792]: I1202 18:53:24.406420 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-st62r" Dec 02 18:53:24 crc kubenswrapper[4792]: I1202 18:53:24.714863 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-85cl4" Dec 02 18:53:24 crc kubenswrapper[4792]: I1202 18:53:24.714921 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-85cl4" Dec 02 18:53:24 crc kubenswrapper[4792]: I1202 18:53:24.799920 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-85cl4" Dec 02 18:53:24 crc kubenswrapper[4792]: I1202 18:53:24.997463 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-trs7c"] Dec 02 18:53:24 crc kubenswrapper[4792]: E1202 18:53:24.997916 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb074f17-56fa-423c-958b-f37564b8433a" containerName="extract-utilities" Dec 02 18:53:24 crc kubenswrapper[4792]: I1202 18:53:24.997944 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb074f17-56fa-423c-958b-f37564b8433a" containerName="extract-utilities" Dec 02 18:53:24 crc kubenswrapper[4792]: E1202 18:53:24.997962 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb074f17-56fa-423c-958b-f37564b8433a" containerName="extract-content" Dec 02 18:53:24 crc kubenswrapper[4792]: I1202 18:53:24.997975 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb074f17-56fa-423c-958b-f37564b8433a" containerName="extract-content" Dec 02 18:53:24 crc kubenswrapper[4792]: E1202 18:53:24.998005 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb074f17-56fa-423c-958b-f37564b8433a" containerName="registry-server" Dec 02 18:53:24 crc kubenswrapper[4792]: I1202 18:53:24.998018 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb074f17-56fa-423c-958b-f37564b8433a" containerName="registry-server" Dec 02 18:53:24 crc kubenswrapper[4792]: I1202 18:53:24.998217 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb074f17-56fa-423c-958b-f37564b8433a" containerName="registry-server" Dec 02 18:53:24 crc kubenswrapper[4792]: I1202 18:53:24.998892 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-trs7c" Dec 02 18:53:25 crc kubenswrapper[4792]: I1202 18:53:25.050829 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-4xf8k" Dec 02 18:53:25 crc kubenswrapper[4792]: I1202 18:53:25.050891 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 02 18:53:25 crc kubenswrapper[4792]: I1202 18:53:25.051177 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 02 18:53:25 crc kubenswrapper[4792]: I1202 18:53:25.052912 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-trs7c"] Dec 02 18:53:25 crc kubenswrapper[4792]: I1202 18:53:25.148750 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-frxg9\" (UniqueName: \"kubernetes.io/projected/a8833bd8-851a-4a5b-84b8-19f6e4016ee7-kube-api-access-frxg9\") pod \"openstack-operator-index-trs7c\" (UID: \"a8833bd8-851a-4a5b-84b8-19f6e4016ee7\") " pod="openstack-operators/openstack-operator-index-trs7c" Dec 02 18:53:25 crc kubenswrapper[4792]: I1202 18:53:25.250711 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-frxg9\" (UniqueName: \"kubernetes.io/projected/a8833bd8-851a-4a5b-84b8-19f6e4016ee7-kube-api-access-frxg9\") pod \"openstack-operator-index-trs7c\" (UID: \"a8833bd8-851a-4a5b-84b8-19f6e4016ee7\") " pod="openstack-operators/openstack-operator-index-trs7c" Dec 02 18:53:25 crc kubenswrapper[4792]: I1202 18:53:25.268187 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-frxg9\" (UniqueName: \"kubernetes.io/projected/a8833bd8-851a-4a5b-84b8-19f6e4016ee7-kube-api-access-frxg9\") pod \"openstack-operator-index-trs7c\" (UID: \"a8833bd8-851a-4a5b-84b8-19f6e4016ee7\") " pod="openstack-operators/openstack-operator-index-trs7c" Dec 02 18:53:25 crc kubenswrapper[4792]: I1202 18:53:25.373266 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-trs7c" Dec 02 18:53:25 crc kubenswrapper[4792]: I1202 18:53:25.642102 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-trs7c"] Dec 02 18:53:25 crc kubenswrapper[4792]: I1202 18:53:25.654886 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-st62r" Dec 02 18:53:25 crc kubenswrapper[4792]: I1202 18:53:25.722279 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-st62r" Dec 02 18:53:26 crc kubenswrapper[4792]: I1202 18:53:26.428575 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-trs7c" event={"ID":"a8833bd8-851a-4a5b-84b8-19f6e4016ee7","Type":"ContainerStarted","Data":"daf5ed9bbd159851c981dfddd5a4a3b0159dbd507c8d7e390b8ecaba47ffb2ed"} Dec 02 18:53:27 crc kubenswrapper[4792]: I1202 18:53:27.847759 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-trs7c"] Dec 02 18:53:28 crc kubenswrapper[4792]: I1202 18:53:28.457464 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-gm2bf"] Dec 02 18:53:28 crc kubenswrapper[4792]: I1202 18:53:28.459203 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-gm2bf" Dec 02 18:53:28 crc kubenswrapper[4792]: I1202 18:53:28.465979 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-gm2bf"] Dec 02 18:53:28 crc kubenswrapper[4792]: I1202 18:53:28.601563 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kjphz\" (UniqueName: \"kubernetes.io/projected/15960e0b-8e49-4e4b-b236-5efc49470e11-kube-api-access-kjphz\") pod \"openstack-operator-index-gm2bf\" (UID: \"15960e0b-8e49-4e4b-b236-5efc49470e11\") " pod="openstack-operators/openstack-operator-index-gm2bf" Dec 02 18:53:28 crc kubenswrapper[4792]: I1202 18:53:28.703794 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kjphz\" (UniqueName: \"kubernetes.io/projected/15960e0b-8e49-4e4b-b236-5efc49470e11-kube-api-access-kjphz\") pod \"openstack-operator-index-gm2bf\" (UID: \"15960e0b-8e49-4e4b-b236-5efc49470e11\") " pod="openstack-operators/openstack-operator-index-gm2bf" Dec 02 18:53:28 crc kubenswrapper[4792]: I1202 18:53:28.726240 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kjphz\" (UniqueName: \"kubernetes.io/projected/15960e0b-8e49-4e4b-b236-5efc49470e11-kube-api-access-kjphz\") pod \"openstack-operator-index-gm2bf\" (UID: \"15960e0b-8e49-4e4b-b236-5efc49470e11\") " pod="openstack-operators/openstack-operator-index-gm2bf" Dec 02 18:53:28 crc kubenswrapper[4792]: I1202 18:53:28.784184 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-gm2bf" Dec 02 18:53:29 crc kubenswrapper[4792]: I1202 18:53:29.269924 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-gm2bf"] Dec 02 18:53:29 crc kubenswrapper[4792]: W1202 18:53:29.275567 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod15960e0b_8e49_4e4b_b236_5efc49470e11.slice/crio-67b68318293e35937589238d87cd67948df4a79918a68904489539c68e8f25dd WatchSource:0}: Error finding container 67b68318293e35937589238d87cd67948df4a79918a68904489539c68e8f25dd: Status 404 returned error can't find the container with id 67b68318293e35937589238d87cd67948df4a79918a68904489539c68e8f25dd Dec 02 18:53:29 crc kubenswrapper[4792]: I1202 18:53:29.499328 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-gm2bf" event={"ID":"15960e0b-8e49-4e4b-b236-5efc49470e11","Type":"ContainerStarted","Data":"67b68318293e35937589238d87cd67948df4a79918a68904489539c68e8f25dd"} Dec 02 18:53:29 crc kubenswrapper[4792]: I1202 18:53:29.502330 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-trs7c" event={"ID":"a8833bd8-851a-4a5b-84b8-19f6e4016ee7","Type":"ContainerStarted","Data":"2e24033f3de8c7343965fe01864ff54c2c4af28a56fc24d7822716b73a73e0c2"} Dec 02 18:53:29 crc kubenswrapper[4792]: I1202 18:53:29.502689 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-trs7c" podUID="a8833bd8-851a-4a5b-84b8-19f6e4016ee7" containerName="registry-server" containerID="cri-o://2e24033f3de8c7343965fe01864ff54c2c4af28a56fc24d7822716b73a73e0c2" gracePeriod=2 Dec 02 18:53:29 crc kubenswrapper[4792]: I1202 18:53:29.535816 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-trs7c" podStartSLOduration=2.613344588 podStartE2EDuration="5.535786071s" podCreationTimestamp="2025-12-02 18:53:24 +0000 UTC" firstStartedPulling="2025-12-02 18:53:25.661408417 +0000 UTC m=+1036.434300745" lastFinishedPulling="2025-12-02 18:53:28.58384989 +0000 UTC m=+1039.356742228" observedRunningTime="2025-12-02 18:53:29.530760467 +0000 UTC m=+1040.303652805" watchObservedRunningTime="2025-12-02 18:53:29.535786071 +0000 UTC m=+1040.308678449" Dec 02 18:53:29 crc kubenswrapper[4792]: I1202 18:53:29.959684 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-trs7c" Dec 02 18:53:30 crc kubenswrapper[4792]: I1202 18:53:30.127741 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-frxg9\" (UniqueName: \"kubernetes.io/projected/a8833bd8-851a-4a5b-84b8-19f6e4016ee7-kube-api-access-frxg9\") pod \"a8833bd8-851a-4a5b-84b8-19f6e4016ee7\" (UID: \"a8833bd8-851a-4a5b-84b8-19f6e4016ee7\") " Dec 02 18:53:30 crc kubenswrapper[4792]: I1202 18:53:30.136979 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8833bd8-851a-4a5b-84b8-19f6e4016ee7-kube-api-access-frxg9" (OuterVolumeSpecName: "kube-api-access-frxg9") pod "a8833bd8-851a-4a5b-84b8-19f6e4016ee7" (UID: "a8833bd8-851a-4a5b-84b8-19f6e4016ee7"). InnerVolumeSpecName "kube-api-access-frxg9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:53:30 crc kubenswrapper[4792]: I1202 18:53:30.229285 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-frxg9\" (UniqueName: \"kubernetes.io/projected/a8833bd8-851a-4a5b-84b8-19f6e4016ee7-kube-api-access-frxg9\") on node \"crc\" DevicePath \"\"" Dec 02 18:53:30 crc kubenswrapper[4792]: I1202 18:53:30.512218 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-gm2bf" event={"ID":"15960e0b-8e49-4e4b-b236-5efc49470e11","Type":"ContainerStarted","Data":"f9fce8efee9d92efdac6d9f26b6fe0a12a179f5f9db941a8ff56717df995d536"} Dec 02 18:53:30 crc kubenswrapper[4792]: I1202 18:53:30.515944 4792 generic.go:334] "Generic (PLEG): container finished" podID="a8833bd8-851a-4a5b-84b8-19f6e4016ee7" containerID="2e24033f3de8c7343965fe01864ff54c2c4af28a56fc24d7822716b73a73e0c2" exitCode=0 Dec 02 18:53:30 crc kubenswrapper[4792]: I1202 18:53:30.516014 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-trs7c" Dec 02 18:53:30 crc kubenswrapper[4792]: I1202 18:53:30.516011 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-trs7c" event={"ID":"a8833bd8-851a-4a5b-84b8-19f6e4016ee7","Type":"ContainerDied","Data":"2e24033f3de8c7343965fe01864ff54c2c4af28a56fc24d7822716b73a73e0c2"} Dec 02 18:53:30 crc kubenswrapper[4792]: I1202 18:53:30.516241 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-trs7c" event={"ID":"a8833bd8-851a-4a5b-84b8-19f6e4016ee7","Type":"ContainerDied","Data":"daf5ed9bbd159851c981dfddd5a4a3b0159dbd507c8d7e390b8ecaba47ffb2ed"} Dec 02 18:53:30 crc kubenswrapper[4792]: I1202 18:53:30.516288 4792 scope.go:117] "RemoveContainer" containerID="2e24033f3de8c7343965fe01864ff54c2c4af28a56fc24d7822716b73a73e0c2" Dec 02 18:53:30 crc kubenswrapper[4792]: I1202 18:53:30.539865 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-gm2bf" podStartSLOduration=2.474105085 podStartE2EDuration="2.539837822s" podCreationTimestamp="2025-12-02 18:53:28 +0000 UTC" firstStartedPulling="2025-12-02 18:53:29.281894857 +0000 UTC m=+1040.054787215" lastFinishedPulling="2025-12-02 18:53:29.347627584 +0000 UTC m=+1040.120519952" observedRunningTime="2025-12-02 18:53:30.537676538 +0000 UTC m=+1041.310568906" watchObservedRunningTime="2025-12-02 18:53:30.539837822 +0000 UTC m=+1041.312730180" Dec 02 18:53:30 crc kubenswrapper[4792]: I1202 18:53:30.542067 4792 scope.go:117] "RemoveContainer" containerID="2e24033f3de8c7343965fe01864ff54c2c4af28a56fc24d7822716b73a73e0c2" Dec 02 18:53:30 crc kubenswrapper[4792]: E1202 18:53:30.542836 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e24033f3de8c7343965fe01864ff54c2c4af28a56fc24d7822716b73a73e0c2\": container with ID starting with 2e24033f3de8c7343965fe01864ff54c2c4af28a56fc24d7822716b73a73e0c2 not found: ID does not exist" containerID="2e24033f3de8c7343965fe01864ff54c2c4af28a56fc24d7822716b73a73e0c2" Dec 02 18:53:30 crc kubenswrapper[4792]: I1202 18:53:30.542924 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e24033f3de8c7343965fe01864ff54c2c4af28a56fc24d7822716b73a73e0c2"} err="failed to get container status 
\"2e24033f3de8c7343965fe01864ff54c2c4af28a56fc24d7822716b73a73e0c2\": rpc error: code = NotFound desc = could not find container \"2e24033f3de8c7343965fe01864ff54c2c4af28a56fc24d7822716b73a73e0c2\": container with ID starting with 2e24033f3de8c7343965fe01864ff54c2c4af28a56fc24d7822716b73a73e0c2 not found: ID does not exist" Dec 02 18:53:30 crc kubenswrapper[4792]: I1202 18:53:30.576597 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-trs7c"] Dec 02 18:53:30 crc kubenswrapper[4792]: I1202 18:53:30.582328 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-trs7c"] Dec 02 18:53:30 crc kubenswrapper[4792]: I1202 18:53:30.674770 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-cjmwb" Dec 02 18:53:30 crc kubenswrapper[4792]: I1202 18:53:30.819504 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-f8648f98b-pjmcf" Dec 02 18:53:31 crc kubenswrapper[4792]: I1202 18:53:31.552634 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8833bd8-851a-4a5b-84b8-19f6e4016ee7" path="/var/lib/kubelet/pods/a8833bd8-851a-4a5b-84b8-19f6e4016ee7/volumes" Dec 02 18:53:34 crc kubenswrapper[4792]: I1202 18:53:34.785352 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-85cl4" Dec 02 18:53:37 crc kubenswrapper[4792]: I1202 18:53:37.842213 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-85cl4"] Dec 02 18:53:37 crc kubenswrapper[4792]: I1202 18:53:37.842964 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-85cl4" podUID="600572fb-c68d-4a1c-86ff-1f542564daeb" containerName="registry-server" containerID="cri-o://fa2d29f367c2442c760c0f337f77575e9f89b16371f9b9cc2e9ae7da76640d33" gracePeriod=2 Dec 02 18:53:38 crc kubenswrapper[4792]: I1202 18:53:38.785206 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-gm2bf" Dec 02 18:53:38 crc kubenswrapper[4792]: I1202 18:53:38.785256 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-gm2bf" Dec 02 18:53:38 crc kubenswrapper[4792]: I1202 18:53:38.831874 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-gm2bf" Dec 02 18:53:39 crc kubenswrapper[4792]: I1202 18:53:39.373064 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-85cl4" Dec 02 18:53:39 crc kubenswrapper[4792]: I1202 18:53:39.480617 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/600572fb-c68d-4a1c-86ff-1f542564daeb-utilities\") pod \"600572fb-c68d-4a1c-86ff-1f542564daeb\" (UID: \"600572fb-c68d-4a1c-86ff-1f542564daeb\") " Dec 02 18:53:39 crc kubenswrapper[4792]: I1202 18:53:39.480829 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-krl4d\" (UniqueName: \"kubernetes.io/projected/600572fb-c68d-4a1c-86ff-1f542564daeb-kube-api-access-krl4d\") pod \"600572fb-c68d-4a1c-86ff-1f542564daeb\" (UID: \"600572fb-c68d-4a1c-86ff-1f542564daeb\") " Dec 02 18:53:39 crc kubenswrapper[4792]: I1202 18:53:39.480863 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/600572fb-c68d-4a1c-86ff-1f542564daeb-catalog-content\") pod \"600572fb-c68d-4a1c-86ff-1f542564daeb\" (UID: \"600572fb-c68d-4a1c-86ff-1f542564daeb\") " Dec 02 18:53:39 crc kubenswrapper[4792]: I1202 18:53:39.481868 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/600572fb-c68d-4a1c-86ff-1f542564daeb-utilities" (OuterVolumeSpecName: "utilities") pod "600572fb-c68d-4a1c-86ff-1f542564daeb" (UID: "600572fb-c68d-4a1c-86ff-1f542564daeb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:53:39 crc kubenswrapper[4792]: I1202 18:53:39.486962 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/600572fb-c68d-4a1c-86ff-1f542564daeb-kube-api-access-krl4d" (OuterVolumeSpecName: "kube-api-access-krl4d") pod "600572fb-c68d-4a1c-86ff-1f542564daeb" (UID: "600572fb-c68d-4a1c-86ff-1f542564daeb"). InnerVolumeSpecName "kube-api-access-krl4d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:53:39 crc kubenswrapper[4792]: I1202 18:53:39.509158 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/600572fb-c68d-4a1c-86ff-1f542564daeb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "600572fb-c68d-4a1c-86ff-1f542564daeb" (UID: "600572fb-c68d-4a1c-86ff-1f542564daeb"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:53:39 crc kubenswrapper[4792]: I1202 18:53:39.582406 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-krl4d\" (UniqueName: \"kubernetes.io/projected/600572fb-c68d-4a1c-86ff-1f542564daeb-kube-api-access-krl4d\") on node \"crc\" DevicePath \"\"" Dec 02 18:53:39 crc kubenswrapper[4792]: I1202 18:53:39.582449 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/600572fb-c68d-4a1c-86ff-1f542564daeb-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 18:53:39 crc kubenswrapper[4792]: I1202 18:53:39.582467 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/600572fb-c68d-4a1c-86ff-1f542564daeb-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 18:53:39 crc kubenswrapper[4792]: I1202 18:53:39.594380 4792 generic.go:334] "Generic (PLEG): container finished" podID="600572fb-c68d-4a1c-86ff-1f542564daeb" containerID="fa2d29f367c2442c760c0f337f77575e9f89b16371f9b9cc2e9ae7da76640d33" exitCode=0 Dec 02 18:53:39 crc kubenswrapper[4792]: I1202 18:53:39.594737 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-85cl4" event={"ID":"600572fb-c68d-4a1c-86ff-1f542564daeb","Type":"ContainerDied","Data":"fa2d29f367c2442c760c0f337f77575e9f89b16371f9b9cc2e9ae7da76640d33"} Dec 02 18:53:39 crc kubenswrapper[4792]: I1202 18:53:39.594827 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-85cl4" event={"ID":"600572fb-c68d-4a1c-86ff-1f542564daeb","Type":"ContainerDied","Data":"2a98fe62c6a8d946cf168cc8fdeb559b11e213f74d5df99935345f49273a1dc6"} Dec 02 18:53:39 crc kubenswrapper[4792]: I1202 18:53:39.594865 4792 scope.go:117] "RemoveContainer" containerID="fa2d29f367c2442c760c0f337f77575e9f89b16371f9b9cc2e9ae7da76640d33" Dec 02 18:53:39 crc kubenswrapper[4792]: I1202 18:53:39.594765 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-85cl4" Dec 02 18:53:39 crc kubenswrapper[4792]: I1202 18:53:39.623476 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-85cl4"] Dec 02 18:53:39 crc kubenswrapper[4792]: I1202 18:53:39.629880 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-85cl4"] Dec 02 18:53:39 crc kubenswrapper[4792]: I1202 18:53:39.630944 4792 scope.go:117] "RemoveContainer" containerID="628305a67dafe47e5959c1191e8d9f6ec8883665e8211e2573ea385a7121671f" Dec 02 18:53:39 crc kubenswrapper[4792]: I1202 18:53:39.632384 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-gm2bf" Dec 02 18:53:39 crc kubenswrapper[4792]: I1202 18:53:39.668056 4792 scope.go:117] "RemoveContainer" containerID="e831f5d10a8241cd65f9a4c0f2c4fe69c1fb5d3c717fa92674c29f3d143b0daf" Dec 02 18:53:39 crc kubenswrapper[4792]: I1202 18:53:39.704667 4792 scope.go:117] "RemoveContainer" containerID="fa2d29f367c2442c760c0f337f77575e9f89b16371f9b9cc2e9ae7da76640d33" Dec 02 18:53:39 crc kubenswrapper[4792]: E1202 18:53:39.705680 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa2d29f367c2442c760c0f337f77575e9f89b16371f9b9cc2e9ae7da76640d33\": container with ID starting with fa2d29f367c2442c760c0f337f77575e9f89b16371f9b9cc2e9ae7da76640d33 not found: ID does not exist" containerID="fa2d29f367c2442c760c0f337f77575e9f89b16371f9b9cc2e9ae7da76640d33" Dec 02 18:53:39 crc kubenswrapper[4792]: I1202 18:53:39.705782 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa2d29f367c2442c760c0f337f77575e9f89b16371f9b9cc2e9ae7da76640d33"} err="failed to get container status \"fa2d29f367c2442c760c0f337f77575e9f89b16371f9b9cc2e9ae7da76640d33\": rpc error: code = NotFound desc = could not find container \"fa2d29f367c2442c760c0f337f77575e9f89b16371f9b9cc2e9ae7da76640d33\": container with ID starting with fa2d29f367c2442c760c0f337f77575e9f89b16371f9b9cc2e9ae7da76640d33 not found: ID does not exist" Dec 02 18:53:39 crc kubenswrapper[4792]: I1202 18:53:39.705915 4792 scope.go:117] "RemoveContainer" containerID="628305a67dafe47e5959c1191e8d9f6ec8883665e8211e2573ea385a7121671f" Dec 02 18:53:39 crc kubenswrapper[4792]: E1202 18:53:39.706279 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"628305a67dafe47e5959c1191e8d9f6ec8883665e8211e2573ea385a7121671f\": container with ID starting with 628305a67dafe47e5959c1191e8d9f6ec8883665e8211e2573ea385a7121671f not found: ID does not exist" containerID="628305a67dafe47e5959c1191e8d9f6ec8883665e8211e2573ea385a7121671f" Dec 02 18:53:39 crc kubenswrapper[4792]: I1202 18:53:39.706358 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"628305a67dafe47e5959c1191e8d9f6ec8883665e8211e2573ea385a7121671f"} err="failed to get container status \"628305a67dafe47e5959c1191e8d9f6ec8883665e8211e2573ea385a7121671f\": rpc error: code = NotFound desc = could not find container \"628305a67dafe47e5959c1191e8d9f6ec8883665e8211e2573ea385a7121671f\": container with ID starting with 628305a67dafe47e5959c1191e8d9f6ec8883665e8211e2573ea385a7121671f not found: ID does not exist" Dec 02 18:53:39 crc kubenswrapper[4792]: I1202 18:53:39.706393 4792 scope.go:117] "RemoveContainer" 
containerID="e831f5d10a8241cd65f9a4c0f2c4fe69c1fb5d3c717fa92674c29f3d143b0daf" Dec 02 18:53:39 crc kubenswrapper[4792]: E1202 18:53:39.706713 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e831f5d10a8241cd65f9a4c0f2c4fe69c1fb5d3c717fa92674c29f3d143b0daf\": container with ID starting with e831f5d10a8241cd65f9a4c0f2c4fe69c1fb5d3c717fa92674c29f3d143b0daf not found: ID does not exist" containerID="e831f5d10a8241cd65f9a4c0f2c4fe69c1fb5d3c717fa92674c29f3d143b0daf" Dec 02 18:53:39 crc kubenswrapper[4792]: I1202 18:53:39.706760 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e831f5d10a8241cd65f9a4c0f2c4fe69c1fb5d3c717fa92674c29f3d143b0daf"} err="failed to get container status \"e831f5d10a8241cd65f9a4c0f2c4fe69c1fb5d3c717fa92674c29f3d143b0daf\": rpc error: code = NotFound desc = could not find container \"e831f5d10a8241cd65f9a4c0f2c4fe69c1fb5d3c717fa92674c29f3d143b0daf\": container with ID starting with e831f5d10a8241cd65f9a4c0f2c4fe69c1fb5d3c717fa92674c29f3d143b0daf not found: ID does not exist" Dec 02 18:53:40 crc kubenswrapper[4792]: I1202 18:53:40.656800 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-st62r" Dec 02 18:53:41 crc kubenswrapper[4792]: I1202 18:53:41.556665 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="600572fb-c68d-4a1c-86ff-1f542564daeb" path="/var/lib/kubelet/pods/600572fb-c68d-4a1c-86ff-1f542564daeb/volumes" Dec 02 18:53:46 crc kubenswrapper[4792]: I1202 18:53:46.108805 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/e9ccccce517e067e9e01e9f87d131b7f866bc945cdc62c92934fa1487f5wsqn"] Dec 02 18:53:46 crc kubenswrapper[4792]: E1202 18:53:46.110193 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="600572fb-c68d-4a1c-86ff-1f542564daeb" containerName="extract-content" Dec 02 18:53:46 crc kubenswrapper[4792]: I1202 18:53:46.110218 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="600572fb-c68d-4a1c-86ff-1f542564daeb" containerName="extract-content" Dec 02 18:53:46 crc kubenswrapper[4792]: E1202 18:53:46.110243 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="600572fb-c68d-4a1c-86ff-1f542564daeb" containerName="registry-server" Dec 02 18:53:46 crc kubenswrapper[4792]: I1202 18:53:46.110256 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="600572fb-c68d-4a1c-86ff-1f542564daeb" containerName="registry-server" Dec 02 18:53:46 crc kubenswrapper[4792]: E1202 18:53:46.110287 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="600572fb-c68d-4a1c-86ff-1f542564daeb" containerName="extract-utilities" Dec 02 18:53:46 crc kubenswrapper[4792]: I1202 18:53:46.110302 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="600572fb-c68d-4a1c-86ff-1f542564daeb" containerName="extract-utilities" Dec 02 18:53:46 crc kubenswrapper[4792]: E1202 18:53:46.110332 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8833bd8-851a-4a5b-84b8-19f6e4016ee7" containerName="registry-server" Dec 02 18:53:46 crc kubenswrapper[4792]: I1202 18:53:46.110346 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8833bd8-851a-4a5b-84b8-19f6e4016ee7" containerName="registry-server" Dec 02 18:53:46 crc kubenswrapper[4792]: I1202 18:53:46.110590 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="600572fb-c68d-4a1c-86ff-1f542564daeb" 
containerName="registry-server" Dec 02 18:53:46 crc kubenswrapper[4792]: I1202 18:53:46.110630 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8833bd8-851a-4a5b-84b8-19f6e4016ee7" containerName="registry-server" Dec 02 18:53:46 crc kubenswrapper[4792]: I1202 18:53:46.112068 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/e9ccccce517e067e9e01e9f87d131b7f866bc945cdc62c92934fa1487f5wsqn" Dec 02 18:53:46 crc kubenswrapper[4792]: I1202 18:53:46.116021 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-swxxd" Dec 02 18:53:46 crc kubenswrapper[4792]: I1202 18:53:46.140255 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/e9ccccce517e067e9e01e9f87d131b7f866bc945cdc62c92934fa1487f5wsqn"] Dec 02 18:53:46 crc kubenswrapper[4792]: I1202 18:53:46.191773 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f6b14071-1f38-444e-8b2e-30e7eb904e0f-bundle\") pod \"e9ccccce517e067e9e01e9f87d131b7f866bc945cdc62c92934fa1487f5wsqn\" (UID: \"f6b14071-1f38-444e-8b2e-30e7eb904e0f\") " pod="openstack-operators/e9ccccce517e067e9e01e9f87d131b7f866bc945cdc62c92934fa1487f5wsqn" Dec 02 18:53:46 crc kubenswrapper[4792]: I1202 18:53:46.192184 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f6b14071-1f38-444e-8b2e-30e7eb904e0f-util\") pod \"e9ccccce517e067e9e01e9f87d131b7f866bc945cdc62c92934fa1487f5wsqn\" (UID: \"f6b14071-1f38-444e-8b2e-30e7eb904e0f\") " pod="openstack-operators/e9ccccce517e067e9e01e9f87d131b7f866bc945cdc62c92934fa1487f5wsqn" Dec 02 18:53:46 crc kubenswrapper[4792]: I1202 18:53:46.192407 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vlxw4\" (UniqueName: \"kubernetes.io/projected/f6b14071-1f38-444e-8b2e-30e7eb904e0f-kube-api-access-vlxw4\") pod \"e9ccccce517e067e9e01e9f87d131b7f866bc945cdc62c92934fa1487f5wsqn\" (UID: \"f6b14071-1f38-444e-8b2e-30e7eb904e0f\") " pod="openstack-operators/e9ccccce517e067e9e01e9f87d131b7f866bc945cdc62c92934fa1487f5wsqn" Dec 02 18:53:46 crc kubenswrapper[4792]: I1202 18:53:46.294275 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f6b14071-1f38-444e-8b2e-30e7eb904e0f-util\") pod \"e9ccccce517e067e9e01e9f87d131b7f866bc945cdc62c92934fa1487f5wsqn\" (UID: \"f6b14071-1f38-444e-8b2e-30e7eb904e0f\") " pod="openstack-operators/e9ccccce517e067e9e01e9f87d131b7f866bc945cdc62c92934fa1487f5wsqn" Dec 02 18:53:46 crc kubenswrapper[4792]: I1202 18:53:46.294389 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vlxw4\" (UniqueName: \"kubernetes.io/projected/f6b14071-1f38-444e-8b2e-30e7eb904e0f-kube-api-access-vlxw4\") pod \"e9ccccce517e067e9e01e9f87d131b7f866bc945cdc62c92934fa1487f5wsqn\" (UID: \"f6b14071-1f38-444e-8b2e-30e7eb904e0f\") " pod="openstack-operators/e9ccccce517e067e9e01e9f87d131b7f866bc945cdc62c92934fa1487f5wsqn" Dec 02 18:53:46 crc kubenswrapper[4792]: I1202 18:53:46.294469 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f6b14071-1f38-444e-8b2e-30e7eb904e0f-bundle\") pod \"e9ccccce517e067e9e01e9f87d131b7f866bc945cdc62c92934fa1487f5wsqn\" (UID: 
\"f6b14071-1f38-444e-8b2e-30e7eb904e0f\") " pod="openstack-operators/e9ccccce517e067e9e01e9f87d131b7f866bc945cdc62c92934fa1487f5wsqn" Dec 02 18:53:46 crc kubenswrapper[4792]: I1202 18:53:46.295324 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f6b14071-1f38-444e-8b2e-30e7eb904e0f-bundle\") pod \"e9ccccce517e067e9e01e9f87d131b7f866bc945cdc62c92934fa1487f5wsqn\" (UID: \"f6b14071-1f38-444e-8b2e-30e7eb904e0f\") " pod="openstack-operators/e9ccccce517e067e9e01e9f87d131b7f866bc945cdc62c92934fa1487f5wsqn" Dec 02 18:53:46 crc kubenswrapper[4792]: I1202 18:53:46.295730 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f6b14071-1f38-444e-8b2e-30e7eb904e0f-util\") pod \"e9ccccce517e067e9e01e9f87d131b7f866bc945cdc62c92934fa1487f5wsqn\" (UID: \"f6b14071-1f38-444e-8b2e-30e7eb904e0f\") " pod="openstack-operators/e9ccccce517e067e9e01e9f87d131b7f866bc945cdc62c92934fa1487f5wsqn" Dec 02 18:53:46 crc kubenswrapper[4792]: I1202 18:53:46.328080 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vlxw4\" (UniqueName: \"kubernetes.io/projected/f6b14071-1f38-444e-8b2e-30e7eb904e0f-kube-api-access-vlxw4\") pod \"e9ccccce517e067e9e01e9f87d131b7f866bc945cdc62c92934fa1487f5wsqn\" (UID: \"f6b14071-1f38-444e-8b2e-30e7eb904e0f\") " pod="openstack-operators/e9ccccce517e067e9e01e9f87d131b7f866bc945cdc62c92934fa1487f5wsqn" Dec 02 18:53:46 crc kubenswrapper[4792]: I1202 18:53:46.449275 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/e9ccccce517e067e9e01e9f87d131b7f866bc945cdc62c92934fa1487f5wsqn" Dec 02 18:53:46 crc kubenswrapper[4792]: I1202 18:53:46.739324 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/e9ccccce517e067e9e01e9f87d131b7f866bc945cdc62c92934fa1487f5wsqn"] Dec 02 18:53:46 crc kubenswrapper[4792]: W1202 18:53:46.749397 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf6b14071_1f38_444e_8b2e_30e7eb904e0f.slice/crio-dee6853a9c5404e760f715cbc5af40914e040b5de460c3ea8dd7abcbc04935ad WatchSource:0}: Error finding container dee6853a9c5404e760f715cbc5af40914e040b5de460c3ea8dd7abcbc04935ad: Status 404 returned error can't find the container with id dee6853a9c5404e760f715cbc5af40914e040b5de460c3ea8dd7abcbc04935ad Dec 02 18:53:47 crc kubenswrapper[4792]: I1202 18:53:47.685574 4792 generic.go:334] "Generic (PLEG): container finished" podID="f6b14071-1f38-444e-8b2e-30e7eb904e0f" containerID="cc38c4668dbdfb0aa13f3343ef5a4b8c2184bf0e43a76cdf5033d4d37850116b" exitCode=0 Dec 02 18:53:47 crc kubenswrapper[4792]: I1202 18:53:47.685652 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e9ccccce517e067e9e01e9f87d131b7f866bc945cdc62c92934fa1487f5wsqn" event={"ID":"f6b14071-1f38-444e-8b2e-30e7eb904e0f","Type":"ContainerDied","Data":"cc38c4668dbdfb0aa13f3343ef5a4b8c2184bf0e43a76cdf5033d4d37850116b"} Dec 02 18:53:47 crc kubenswrapper[4792]: I1202 18:53:47.685919 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e9ccccce517e067e9e01e9f87d131b7f866bc945cdc62c92934fa1487f5wsqn" event={"ID":"f6b14071-1f38-444e-8b2e-30e7eb904e0f","Type":"ContainerStarted","Data":"dee6853a9c5404e760f715cbc5af40914e040b5de460c3ea8dd7abcbc04935ad"} Dec 02 18:53:48 crc kubenswrapper[4792]: I1202 18:53:48.698663 4792 generic.go:334] "Generic (PLEG): 
container finished" podID="f6b14071-1f38-444e-8b2e-30e7eb904e0f" containerID="8dfa023ef319456ef95f78644c679a52fa9e699f250ecd749c2f222a0d2fce7b" exitCode=0 Dec 02 18:53:48 crc kubenswrapper[4792]: I1202 18:53:48.698736 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e9ccccce517e067e9e01e9f87d131b7f866bc945cdc62c92934fa1487f5wsqn" event={"ID":"f6b14071-1f38-444e-8b2e-30e7eb904e0f","Type":"ContainerDied","Data":"8dfa023ef319456ef95f78644c679a52fa9e699f250ecd749c2f222a0d2fce7b"} Dec 02 18:53:49 crc kubenswrapper[4792]: I1202 18:53:49.712705 4792 generic.go:334] "Generic (PLEG): container finished" podID="f6b14071-1f38-444e-8b2e-30e7eb904e0f" containerID="6dd88b5b28ad81f271245f1f4101e6fc3f57571e801611fd593fbba1dcad469a" exitCode=0 Dec 02 18:53:49 crc kubenswrapper[4792]: I1202 18:53:49.712760 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e9ccccce517e067e9e01e9f87d131b7f866bc945cdc62c92934fa1487f5wsqn" event={"ID":"f6b14071-1f38-444e-8b2e-30e7eb904e0f","Type":"ContainerDied","Data":"6dd88b5b28ad81f271245f1f4101e6fc3f57571e801611fd593fbba1dcad469a"} Dec 02 18:53:51 crc kubenswrapper[4792]: I1202 18:53:51.081129 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/e9ccccce517e067e9e01e9f87d131b7f866bc945cdc62c92934fa1487f5wsqn" Dec 02 18:53:51 crc kubenswrapper[4792]: I1202 18:53:51.172312 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vlxw4\" (UniqueName: \"kubernetes.io/projected/f6b14071-1f38-444e-8b2e-30e7eb904e0f-kube-api-access-vlxw4\") pod \"f6b14071-1f38-444e-8b2e-30e7eb904e0f\" (UID: \"f6b14071-1f38-444e-8b2e-30e7eb904e0f\") " Dec 02 18:53:51 crc kubenswrapper[4792]: I1202 18:53:51.172412 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f6b14071-1f38-444e-8b2e-30e7eb904e0f-util\") pod \"f6b14071-1f38-444e-8b2e-30e7eb904e0f\" (UID: \"f6b14071-1f38-444e-8b2e-30e7eb904e0f\") " Dec 02 18:53:51 crc kubenswrapper[4792]: I1202 18:53:51.172512 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f6b14071-1f38-444e-8b2e-30e7eb904e0f-bundle\") pod \"f6b14071-1f38-444e-8b2e-30e7eb904e0f\" (UID: \"f6b14071-1f38-444e-8b2e-30e7eb904e0f\") " Dec 02 18:53:51 crc kubenswrapper[4792]: I1202 18:53:51.174033 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f6b14071-1f38-444e-8b2e-30e7eb904e0f-bundle" (OuterVolumeSpecName: "bundle") pod "f6b14071-1f38-444e-8b2e-30e7eb904e0f" (UID: "f6b14071-1f38-444e-8b2e-30e7eb904e0f"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:53:51 crc kubenswrapper[4792]: I1202 18:53:51.175163 4792 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f6b14071-1f38-444e-8b2e-30e7eb904e0f-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 18:53:51 crc kubenswrapper[4792]: I1202 18:53:51.178240 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f6b14071-1f38-444e-8b2e-30e7eb904e0f-kube-api-access-vlxw4" (OuterVolumeSpecName: "kube-api-access-vlxw4") pod "f6b14071-1f38-444e-8b2e-30e7eb904e0f" (UID: "f6b14071-1f38-444e-8b2e-30e7eb904e0f"). InnerVolumeSpecName "kube-api-access-vlxw4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:53:51 crc kubenswrapper[4792]: I1202 18:53:51.195722 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f6b14071-1f38-444e-8b2e-30e7eb904e0f-util" (OuterVolumeSpecName: "util") pod "f6b14071-1f38-444e-8b2e-30e7eb904e0f" (UID: "f6b14071-1f38-444e-8b2e-30e7eb904e0f"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:53:51 crc kubenswrapper[4792]: I1202 18:53:51.276209 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vlxw4\" (UniqueName: \"kubernetes.io/projected/f6b14071-1f38-444e-8b2e-30e7eb904e0f-kube-api-access-vlxw4\") on node \"crc\" DevicePath \"\"" Dec 02 18:53:51 crc kubenswrapper[4792]: I1202 18:53:51.276241 4792 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f6b14071-1f38-444e-8b2e-30e7eb904e0f-util\") on node \"crc\" DevicePath \"\"" Dec 02 18:53:51 crc kubenswrapper[4792]: I1202 18:53:51.732495 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e9ccccce517e067e9e01e9f87d131b7f866bc945cdc62c92934fa1487f5wsqn" event={"ID":"f6b14071-1f38-444e-8b2e-30e7eb904e0f","Type":"ContainerDied","Data":"dee6853a9c5404e760f715cbc5af40914e040b5de460c3ea8dd7abcbc04935ad"} Dec 02 18:53:51 crc kubenswrapper[4792]: I1202 18:53:51.732601 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dee6853a9c5404e760f715cbc5af40914e040b5de460c3ea8dd7abcbc04935ad" Dec 02 18:53:51 crc kubenswrapper[4792]: I1202 18:53:51.732607 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/e9ccccce517e067e9e01e9f87d131b7f866bc945cdc62c92934fa1487f5wsqn" Dec 02 18:53:58 crc kubenswrapper[4792]: I1202 18:53:58.186884 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6c49cf65b-lk99g"] Dec 02 18:53:58 crc kubenswrapper[4792]: E1202 18:53:58.187647 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6b14071-1f38-444e-8b2e-30e7eb904e0f" containerName="util" Dec 02 18:53:58 crc kubenswrapper[4792]: I1202 18:53:58.187662 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6b14071-1f38-444e-8b2e-30e7eb904e0f" containerName="util" Dec 02 18:53:58 crc kubenswrapper[4792]: E1202 18:53:58.187683 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6b14071-1f38-444e-8b2e-30e7eb904e0f" containerName="extract" Dec 02 18:53:58 crc kubenswrapper[4792]: I1202 18:53:58.187692 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6b14071-1f38-444e-8b2e-30e7eb904e0f" containerName="extract" Dec 02 18:53:58 crc kubenswrapper[4792]: E1202 18:53:58.187711 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6b14071-1f38-444e-8b2e-30e7eb904e0f" containerName="pull" Dec 02 18:53:58 crc kubenswrapper[4792]: I1202 18:53:58.187720 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6b14071-1f38-444e-8b2e-30e7eb904e0f" containerName="pull" Dec 02 18:53:58 crc kubenswrapper[4792]: I1202 18:53:58.187857 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6b14071-1f38-444e-8b2e-30e7eb904e0f" containerName="extract" Dec 02 18:53:58 crc kubenswrapper[4792]: I1202 18:53:58.188570 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-6c49cf65b-lk99g" Dec 02 18:53:58 crc kubenswrapper[4792]: W1202 18:53:58.190315 4792 reflector.go:561] object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-qn694": failed to list *v1.Secret: secrets "openstack-operator-controller-operator-dockercfg-qn694" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openstack-operators": no relationship found between node 'crc' and this object Dec 02 18:53:58 crc kubenswrapper[4792]: E1202 18:53:58.190367 4792 reflector.go:158] "Unhandled Error" err="object-\"openstack-operators\"/\"openstack-operator-controller-operator-dockercfg-qn694\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"openstack-operator-controller-operator-dockercfg-qn694\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openstack-operators\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 02 18:53:58 crc kubenswrapper[4792]: I1202 18:53:58.213045 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6c49cf65b-lk99g"] Dec 02 18:53:58 crc kubenswrapper[4792]: I1202 18:53:58.283907 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pb9fc\" (UniqueName: \"kubernetes.io/projected/b729afba-684d-4ecf-a503-fadb0e933192-kube-api-access-pb9fc\") pod \"openstack-operator-controller-operator-6c49cf65b-lk99g\" (UID: \"b729afba-684d-4ecf-a503-fadb0e933192\") " pod="openstack-operators/openstack-operator-controller-operator-6c49cf65b-lk99g" Dec 02 18:53:58 crc kubenswrapper[4792]: I1202 18:53:58.385490 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pb9fc\" (UniqueName: \"kubernetes.io/projected/b729afba-684d-4ecf-a503-fadb0e933192-kube-api-access-pb9fc\") pod \"openstack-operator-controller-operator-6c49cf65b-lk99g\" (UID: \"b729afba-684d-4ecf-a503-fadb0e933192\") " pod="openstack-operators/openstack-operator-controller-operator-6c49cf65b-lk99g" Dec 02 18:53:58 crc kubenswrapper[4792]: I1202 18:53:58.412427 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pb9fc\" (UniqueName: \"kubernetes.io/projected/b729afba-684d-4ecf-a503-fadb0e933192-kube-api-access-pb9fc\") pod \"openstack-operator-controller-operator-6c49cf65b-lk99g\" (UID: \"b729afba-684d-4ecf-a503-fadb0e933192\") " pod="openstack-operators/openstack-operator-controller-operator-6c49cf65b-lk99g" Dec 02 18:53:59 crc kubenswrapper[4792]: I1202 18:53:59.378623 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-qn694" Dec 02 18:53:59 crc kubenswrapper[4792]: I1202 18:53:59.387256 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-6c49cf65b-lk99g" Dec 02 18:53:59 crc kubenswrapper[4792]: I1202 18:53:59.935225 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6c49cf65b-lk99g"] Dec 02 18:54:00 crc kubenswrapper[4792]: I1202 18:54:00.812719 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-6c49cf65b-lk99g" event={"ID":"b729afba-684d-4ecf-a503-fadb0e933192","Type":"ContainerStarted","Data":"d49c7d163e4c6df17223a62352796e10dc8e7a4a92df108bdfc610cf5e7348e5"} Dec 02 18:54:04 crc kubenswrapper[4792]: I1202 18:54:04.845854 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-6c49cf65b-lk99g" event={"ID":"b729afba-684d-4ecf-a503-fadb0e933192","Type":"ContainerStarted","Data":"88d28ed3dc88c95f170562353add55dbc32df273f5c2b9ebe97fb79aa7079198"} Dec 02 18:54:04 crc kubenswrapper[4792]: I1202 18:54:04.850667 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-6c49cf65b-lk99g" Dec 02 18:54:04 crc kubenswrapper[4792]: I1202 18:54:04.896126 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-6c49cf65b-lk99g" podStartSLOduration=2.883813473 podStartE2EDuration="6.896102861s" podCreationTimestamp="2025-12-02 18:53:58 +0000 UTC" firstStartedPulling="2025-12-02 18:53:59.943901589 +0000 UTC m=+1070.716793957" lastFinishedPulling="2025-12-02 18:54:03.956190977 +0000 UTC m=+1074.729083345" observedRunningTime="2025-12-02 18:54:04.893274141 +0000 UTC m=+1075.666166499" watchObservedRunningTime="2025-12-02 18:54:04.896102861 +0000 UTC m=+1075.668995219" Dec 02 18:54:09 crc kubenswrapper[4792]: I1202 18:54:09.392706 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-6c49cf65b-lk99g" Dec 02 18:54:38 crc kubenswrapper[4792]: I1202 18:54:38.081209 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 18:54:38 crc kubenswrapper[4792]: I1202 18:54:38.081920 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.256616 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-89kmd"] Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.260311 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-89kmd" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.262078 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-qdx8p" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.267321 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-fmggq"] Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.268662 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-fmggq" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.272130 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-vwp8k" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.279606 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-89kmd"] Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.299724 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-7qgqw"] Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.303903 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-7qgqw" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.305974 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-zxkkx" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.317361 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-7qgqw"] Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.325942 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-zpmp6"] Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.326887 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-zpmp6" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.329447 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-nsc84" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.345509 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-fmggq"] Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.365603 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xbg4j"] Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.366949 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xbg4j" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.374940 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-rx8qv" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.381425 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-zpmp6"] Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.386655 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sgr2p\" (UniqueName: \"kubernetes.io/projected/9f1a320f-5255-4fc4-b973-39ce2aee3bae-kube-api-access-sgr2p\") pod \"cinder-operator-controller-manager-859b6ccc6-fmggq\" (UID: \"9f1a320f-5255-4fc4-b973-39ce2aee3bae\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-fmggq" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.386705 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jghrx\" (UniqueName: \"kubernetes.io/projected/77a52e44-0fcb-4b97-93de-0d26a6901c37-kube-api-access-jghrx\") pod \"heat-operator-controller-manager-5f64f6f8bb-xbg4j\" (UID: \"77a52e44-0fcb-4b97-93de-0d26a6901c37\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xbg4j" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.386726 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mn2st\" (UniqueName: \"kubernetes.io/projected/851b5fce-f6b9-4fef-a80c-e66336c5fa49-kube-api-access-mn2st\") pod \"glance-operator-controller-manager-77987cd8cd-zpmp6\" (UID: \"851b5fce-f6b9-4fef-a80c-e66336c5fa49\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-zpmp6" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.386766 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fgl6t\" (UniqueName: \"kubernetes.io/projected/daa6dcd6-39c7-44fc-9754-7de254748ec3-kube-api-access-fgl6t\") pod \"barbican-operator-controller-manager-7d9dfd778-89kmd\" (UID: \"daa6dcd6-39c7-44fc-9754-7de254748ec3\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-89kmd" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.386793 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g9z9g\" (UniqueName: \"kubernetes.io/projected/fce25a63-01bd-458a-9567-f08f710abec9-kube-api-access-g9z9g\") pod \"designate-operator-controller-manager-78b4bc895b-7qgqw\" (UID: \"fce25a63-01bd-458a-9567-f08f710abec9\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-7qgqw" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.419580 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-ddpv4"] Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.420692 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-ddpv4" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.423408 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-nljpf" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.443086 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xbg4j"] Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.447381 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-l4tvg"] Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.449878 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-l4tvg" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.454154 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-5jmht" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.454386 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.456586 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-ddpv4"] Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.482003 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-xrbqg"] Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.483021 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-xrbqg" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.485304 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-l4tvg"] Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.485928 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-xcqxl" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.488329 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g9z9g\" (UniqueName: \"kubernetes.io/projected/fce25a63-01bd-458a-9567-f08f710abec9-kube-api-access-g9z9g\") pod \"designate-operator-controller-manager-78b4bc895b-7qgqw\" (UID: \"fce25a63-01bd-458a-9567-f08f710abec9\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-7qgqw" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.488405 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sgr2p\" (UniqueName: \"kubernetes.io/projected/9f1a320f-5255-4fc4-b973-39ce2aee3bae-kube-api-access-sgr2p\") pod \"cinder-operator-controller-manager-859b6ccc6-fmggq\" (UID: \"9f1a320f-5255-4fc4-b973-39ce2aee3bae\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-fmggq" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.488436 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jghrx\" (UniqueName: \"kubernetes.io/projected/77a52e44-0fcb-4b97-93de-0d26a6901c37-kube-api-access-jghrx\") pod \"heat-operator-controller-manager-5f64f6f8bb-xbg4j\" (UID: \"77a52e44-0fcb-4b97-93de-0d26a6901c37\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xbg4j" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.488472 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mn2st\" (UniqueName: \"kubernetes.io/projected/851b5fce-f6b9-4fef-a80c-e66336c5fa49-kube-api-access-mn2st\") pod \"glance-operator-controller-manager-77987cd8cd-zpmp6\" (UID: \"851b5fce-f6b9-4fef-a80c-e66336c5fa49\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-zpmp6" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.488536 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fgl6t\" (UniqueName: \"kubernetes.io/projected/daa6dcd6-39c7-44fc-9754-7de254748ec3-kube-api-access-fgl6t\") pod \"barbican-operator-controller-manager-7d9dfd778-89kmd\" (UID: \"daa6dcd6-39c7-44fc-9754-7de254748ec3\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-89kmd" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.493567 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-92mth"] Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.494906 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-92mth" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.501111 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-g8nwt"] Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.502333 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-g8nwt" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.502846 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-jlj4c" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.503510 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-92mth"] Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.504744 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-926gm" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.517665 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fgl6t\" (UniqueName: \"kubernetes.io/projected/daa6dcd6-39c7-44fc-9754-7de254748ec3-kube-api-access-fgl6t\") pod \"barbican-operator-controller-manager-7d9dfd778-89kmd\" (UID: \"daa6dcd6-39c7-44fc-9754-7de254748ec3\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-89kmd" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.527642 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-xrbqg"] Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.531844 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g9z9g\" (UniqueName: \"kubernetes.io/projected/fce25a63-01bd-458a-9567-f08f710abec9-kube-api-access-g9z9g\") pod \"designate-operator-controller-manager-78b4bc895b-7qgqw\" (UID: \"fce25a63-01bd-458a-9567-f08f710abec9\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-7qgqw" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.533880 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mn2st\" (UniqueName: \"kubernetes.io/projected/851b5fce-f6b9-4fef-a80c-e66336c5fa49-kube-api-access-mn2st\") pod \"glance-operator-controller-manager-77987cd8cd-zpmp6\" (UID: \"851b5fce-f6b9-4fef-a80c-e66336c5fa49\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-zpmp6" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.540479 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sgr2p\" (UniqueName: \"kubernetes.io/projected/9f1a320f-5255-4fc4-b973-39ce2aee3bae-kube-api-access-sgr2p\") pod \"cinder-operator-controller-manager-859b6ccc6-fmggq\" (UID: \"9f1a320f-5255-4fc4-b973-39ce2aee3bae\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-fmggq" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.559162 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jghrx\" (UniqueName: \"kubernetes.io/projected/77a52e44-0fcb-4b97-93de-0d26a6901c37-kube-api-access-jghrx\") pod \"heat-operator-controller-manager-5f64f6f8bb-xbg4j\" (UID: \"77a52e44-0fcb-4b97-93de-0d26a6901c37\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xbg4j" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.570358 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-g8nwt"] Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.580083 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-89kmd" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.585571 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-zm7n7"] Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.586616 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-zm7n7" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.590040 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-ksx7c"] Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.622556 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-9vzdb" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.625157 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4998553b-ffbc-4684-9756-22885fec1a98-cert\") pod \"infra-operator-controller-manager-57548d458d-l4tvg\" (UID: \"4998553b-ffbc-4684-9756-22885fec1a98\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-l4tvg" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.625213 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8jsbd\" (UniqueName: \"kubernetes.io/projected/a725d6d0-4642-4316-9e67-e002d58f7117-kube-api-access-8jsbd\") pod \"horizon-operator-controller-manager-68c6d99b8f-ddpv4\" (UID: \"a725d6d0-4642-4316-9e67-e002d58f7117\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-ddpv4" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.625508 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qpgdm\" (UniqueName: \"kubernetes.io/projected/4998553b-ffbc-4684-9756-22885fec1a98-kube-api-access-qpgdm\") pod \"infra-operator-controller-manager-57548d458d-l4tvg\" (UID: \"4998553b-ffbc-4684-9756-22885fec1a98\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-l4tvg" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.626547 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-49b25\" (UniqueName: \"kubernetes.io/projected/9c2e4c8c-6f7d-41b0-bc43-19e0b14297f9-kube-api-access-49b25\") pod \"keystone-operator-controller-manager-7765d96ddf-xrbqg\" (UID: \"9c2e4c8c-6f7d-41b0-bc43-19e0b14297f9\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-xrbqg" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.632651 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-7qgqw" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.634183 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-ksx7c" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.634259 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-fmggq" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.642512 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-dmkpg" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.683835 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-zpmp6" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.695867 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xbg4j" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.708536 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-ksx7c"] Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.717502 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-b7kc7"] Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.718913 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-b7kc7" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.722991 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-7gkfd" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.731291 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4998553b-ffbc-4684-9756-22885fec1a98-cert\") pod \"infra-operator-controller-manager-57548d458d-l4tvg\" (UID: \"4998553b-ffbc-4684-9756-22885fec1a98\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-l4tvg" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.731343 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8jsbd\" (UniqueName: \"kubernetes.io/projected/a725d6d0-4642-4316-9e67-e002d58f7117-kube-api-access-8jsbd\") pod \"horizon-operator-controller-manager-68c6d99b8f-ddpv4\" (UID: \"a725d6d0-4642-4316-9e67-e002d58f7117\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-ddpv4" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.731379 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6hfh4\" (UniqueName: \"kubernetes.io/projected/15e497ba-5375-4926-80f5-f46940572f8f-kube-api-access-6hfh4\") pod \"manila-operator-controller-manager-7c79b5df47-g8nwt\" (UID: \"15e497ba-5375-4926-80f5-f46940572f8f\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-g8nwt" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.731424 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nk9l6\" (UniqueName: \"kubernetes.io/projected/e3a43b00-c682-4d04-9996-ceb79a245a18-kube-api-access-nk9l6\") pod \"ironic-operator-controller-manager-6c548fd776-92mth\" (UID: \"e3a43b00-c682-4d04-9996-ceb79a245a18\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-92mth" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.731474 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-qpgdm\" (UniqueName: \"kubernetes.io/projected/4998553b-ffbc-4684-9756-22885fec1a98-kube-api-access-qpgdm\") pod \"infra-operator-controller-manager-57548d458d-l4tvg\" (UID: \"4998553b-ffbc-4684-9756-22885fec1a98\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-l4tvg" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.731490 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d9gj5\" (UniqueName: \"kubernetes.io/projected/2abae0c8-1cd8-4329-a4dc-678124e1195a-kube-api-access-d9gj5\") pod \"mariadb-operator-controller-manager-56bbcc9d85-zm7n7\" (UID: \"2abae0c8-1cd8-4329-a4dc-678124e1195a\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-zm7n7" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.731551 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-49b25\" (UniqueName: \"kubernetes.io/projected/9c2e4c8c-6f7d-41b0-bc43-19e0b14297f9-kube-api-access-49b25\") pod \"keystone-operator-controller-manager-7765d96ddf-xrbqg\" (UID: \"9c2e4c8c-6f7d-41b0-bc43-19e0b14297f9\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-xrbqg" Dec 02 18:54:47 crc kubenswrapper[4792]: E1202 18:54:47.732972 4792 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 02 18:54:47 crc kubenswrapper[4792]: E1202 18:54:47.733018 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4998553b-ffbc-4684-9756-22885fec1a98-cert podName:4998553b-ffbc-4684-9756-22885fec1a98 nodeName:}" failed. No retries permitted until 2025-12-02 18:54:48.233000895 +0000 UTC m=+1119.005893223 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/4998553b-ffbc-4684-9756-22885fec1a98-cert") pod "infra-operator-controller-manager-57548d458d-l4tvg" (UID: "4998553b-ffbc-4684-9756-22885fec1a98") : secret "infra-operator-webhook-server-cert" not found Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.750278 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-zm7n7"] Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.754427 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-49b25\" (UniqueName: \"kubernetes.io/projected/9c2e4c8c-6f7d-41b0-bc43-19e0b14297f9-kube-api-access-49b25\") pod \"keystone-operator-controller-manager-7765d96ddf-xrbqg\" (UID: \"9c2e4c8c-6f7d-41b0-bc43-19e0b14297f9\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-xrbqg" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.759352 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qpgdm\" (UniqueName: \"kubernetes.io/projected/4998553b-ffbc-4684-9756-22885fec1a98-kube-api-access-qpgdm\") pod \"infra-operator-controller-manager-57548d458d-l4tvg\" (UID: \"4998553b-ffbc-4684-9756-22885fec1a98\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-l4tvg" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.764394 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-98tg4"] Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.768475 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-98tg4" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.772211 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-mnxb4" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.776826 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8jsbd\" (UniqueName: \"kubernetes.io/projected/a725d6d0-4642-4316-9e67-e002d58f7117-kube-api-access-8jsbd\") pod \"horizon-operator-controller-manager-68c6d99b8f-ddpv4\" (UID: \"a725d6d0-4642-4316-9e67-e002d58f7117\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-ddpv4" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.782382 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-b7kc7"] Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.790612 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jq2kj"] Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.791636 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jq2kj" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.794396 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-x6wwz" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.794569 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.800246 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-98tg4"] Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.815325 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-qrj8t"] Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.816428 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-xrbqg" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.833607 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d9gj5\" (UniqueName: \"kubernetes.io/projected/2abae0c8-1cd8-4329-a4dc-678124e1195a-kube-api-access-d9gj5\") pod \"mariadb-operator-controller-manager-56bbcc9d85-zm7n7\" (UID: \"2abae0c8-1cd8-4329-a4dc-678124e1195a\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-zm7n7" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.833665 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8b9jj\" (UniqueName: \"kubernetes.io/projected/ef4ea028-2f42-4560-aad7-94553ba2d3d4-kube-api-access-8b9jj\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-b7kc7\" (UID: \"ef4ea028-2f42-4560-aad7-94553ba2d3d4\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-b7kc7" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.833789 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wzgd5\" (UniqueName: \"kubernetes.io/projected/588c52cc-05c0-438d-bb0f-80bc1236d8cc-kube-api-access-wzgd5\") pod \"nova-operator-controller-manager-697bc559fc-ksx7c\" (UID: \"588c52cc-05c0-438d-bb0f-80bc1236d8cc\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-ksx7c" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.833810 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6hfh4\" (UniqueName: \"kubernetes.io/projected/15e497ba-5375-4926-80f5-f46940572f8f-kube-api-access-6hfh4\") pod \"manila-operator-controller-manager-7c79b5df47-g8nwt\" (UID: \"15e497ba-5375-4926-80f5-f46940572f8f\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-g8nwt" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.834231 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nk9l6\" (UniqueName: \"kubernetes.io/projected/e3a43b00-c682-4d04-9996-ceb79a245a18-kube-api-access-nk9l6\") pod \"ironic-operator-controller-manager-6c548fd776-92mth\" (UID: \"e3a43b00-c682-4d04-9996-ceb79a245a18\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-92mth" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.837691 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-kqqtk"] Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.837816 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-qrj8t" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.838904 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-kqqtk" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.843717 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-65t9w" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.843774 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-jsf9g" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.856569 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jq2kj"] Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.868949 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d9gj5\" (UniqueName: \"kubernetes.io/projected/2abae0c8-1cd8-4329-a4dc-678124e1195a-kube-api-access-d9gj5\") pod \"mariadb-operator-controller-manager-56bbcc9d85-zm7n7\" (UID: \"2abae0c8-1cd8-4329-a4dc-678124e1195a\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-zm7n7" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.873592 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nk9l6\" (UniqueName: \"kubernetes.io/projected/e3a43b00-c682-4d04-9996-ceb79a245a18-kube-api-access-nk9l6\") pod \"ironic-operator-controller-manager-6c548fd776-92mth\" (UID: \"e3a43b00-c682-4d04-9996-ceb79a245a18\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-92mth" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.873708 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6hfh4\" (UniqueName: \"kubernetes.io/projected/15e497ba-5375-4926-80f5-f46940572f8f-kube-api-access-6hfh4\") pod \"manila-operator-controller-manager-7c79b5df47-g8nwt\" (UID: \"15e497ba-5375-4926-80f5-f46940572f8f\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-g8nwt" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.886386 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-qrj8t"] Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.891196 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-kqqtk"] Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.897057 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-cfpd2"] Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.898078 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-cfpd2" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.901641 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-7bnxc" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.904725 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-cfpd2"] Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.926212 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-92mth" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.926566 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-6cc9d48475-tplrw"] Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.927698 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-6cc9d48475-tplrw" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.929745 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-l894r" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.936025 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lb59r\" (UniqueName: \"kubernetes.io/projected/8bd01614-55c6-44bf-b67b-8a6570d9425c-kube-api-access-lb59r\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4jq2kj\" (UID: \"8bd01614-55c6-44bf-b67b-8a6570d9425c\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jq2kj" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.936088 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wzgd5\" (UniqueName: \"kubernetes.io/projected/588c52cc-05c0-438d-bb0f-80bc1236d8cc-kube-api-access-wzgd5\") pod \"nova-operator-controller-manager-697bc559fc-ksx7c\" (UID: \"588c52cc-05c0-438d-bb0f-80bc1236d8cc\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-ksx7c" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.936179 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ttkc\" (UniqueName: \"kubernetes.io/projected/dde05ba1-9b55-4f92-9782-d03fed8f26b0-kube-api-access-2ttkc\") pod \"octavia-operator-controller-manager-998648c74-98tg4\" (UID: \"dde05ba1-9b55-4f92-9782-d03fed8f26b0\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-98tg4" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.936232 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8bd01614-55c6-44bf-b67b-8a6570d9425c-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4jq2kj\" (UID: \"8bd01614-55c6-44bf-b67b-8a6570d9425c\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jq2kj" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.936370 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gd9nj\" (UniqueName: \"kubernetes.io/projected/8becc537-85f1-4b33-8b6a-1ef3bc550cdd-kube-api-access-gd9nj\") pod \"ovn-operator-controller-manager-b6456fdb6-qrj8t\" (UID: \"8becc537-85f1-4b33-8b6a-1ef3bc550cdd\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-qrj8t" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.936419 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8b9jj\" (UniqueName: \"kubernetes.io/projected/ef4ea028-2f42-4560-aad7-94553ba2d3d4-kube-api-access-8b9jj\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-b7kc7\" (UID: \"ef4ea028-2f42-4560-aad7-94553ba2d3d4\") " 
pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-b7kc7" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.948490 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-6cc9d48475-tplrw"] Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.959648 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-d922f"] Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.960856 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-d922f" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.964570 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-g8nwt" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.966497 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-7lbvt" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.967979 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8b9jj\" (UniqueName: \"kubernetes.io/projected/ef4ea028-2f42-4560-aad7-94553ba2d3d4-kube-api-access-8b9jj\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-b7kc7\" (UID: \"ef4ea028-2f42-4560-aad7-94553ba2d3d4\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-b7kc7" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.972706 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wzgd5\" (UniqueName: \"kubernetes.io/projected/588c52cc-05c0-438d-bb0f-80bc1236d8cc-kube-api-access-wzgd5\") pod \"nova-operator-controller-manager-697bc559fc-ksx7c\" (UID: \"588c52cc-05c0-438d-bb0f-80bc1236d8cc\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-ksx7c" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.972777 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-d922f"] Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.992747 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-6v9cs"] Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.993849 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-6v9cs" Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.995829 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-6v9cs"] Dec 02 18:54:47 crc kubenswrapper[4792]: I1202 18:54:47.998487 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-wjv7d" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.010284 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-zm7n7" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.045319 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-ksx7c" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.046954 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mh9l4\" (UniqueName: \"kubernetes.io/projected/c305906f-16d5-4e43-9666-299106995d65-kube-api-access-mh9l4\") pod \"placement-operator-controller-manager-78f8948974-kqqtk\" (UID: \"c305906f-16d5-4e43-9666-299106995d65\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-kqqtk" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.047001 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gd9nj\" (UniqueName: \"kubernetes.io/projected/8becc537-85f1-4b33-8b6a-1ef3bc550cdd-kube-api-access-gd9nj\") pod \"ovn-operator-controller-manager-b6456fdb6-qrj8t\" (UID: \"8becc537-85f1-4b33-8b6a-1ef3bc550cdd\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-qrj8t" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.047027 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-drn6g\" (UniqueName: \"kubernetes.io/projected/64a3a015-bcba-4079-b30b-47579e9a7513-kube-api-access-drn6g\") pod \"swift-operator-controller-manager-5f8c65bbfc-cfpd2\" (UID: \"64a3a015-bcba-4079-b30b-47579e9a7513\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-cfpd2" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.047065 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lb59r\" (UniqueName: \"kubernetes.io/projected/8bd01614-55c6-44bf-b67b-8a6570d9425c-kube-api-access-lb59r\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4jq2kj\" (UID: \"8bd01614-55c6-44bf-b67b-8a6570d9425c\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jq2kj" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.047083 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45rbw\" (UniqueName: \"kubernetes.io/projected/746154e5-b7a7-4ce9-b0db-4c88c998ccac-kube-api-access-45rbw\") pod \"test-operator-controller-manager-5854674fcc-d922f\" (UID: \"746154e5-b7a7-4ce9-b0db-4c88c998ccac\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-d922f" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.047117 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tg229\" (UniqueName: \"kubernetes.io/projected/474ebfec-9504-4baa-a320-af5bd167bf33-kube-api-access-tg229\") pod \"telemetry-operator-controller-manager-6cc9d48475-tplrw\" (UID: \"474ebfec-9504-4baa-a320-af5bd167bf33\") " pod="openstack-operators/telemetry-operator-controller-manager-6cc9d48475-tplrw" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.047150 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hqc4z\" (UniqueName: \"kubernetes.io/projected/ad6c4009-148b-4b91-bd36-4d9bd2a16bed-kube-api-access-hqc4z\") pod \"watcher-operator-controller-manager-769dc69bc-6v9cs\" (UID: \"ad6c4009-148b-4b91-bd36-4d9bd2a16bed\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-6v9cs" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.047169 4792 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-2ttkc\" (UniqueName: \"kubernetes.io/projected/dde05ba1-9b55-4f92-9782-d03fed8f26b0-kube-api-access-2ttkc\") pod \"octavia-operator-controller-manager-998648c74-98tg4\" (UID: \"dde05ba1-9b55-4f92-9782-d03fed8f26b0\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-98tg4" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.047192 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8bd01614-55c6-44bf-b67b-8a6570d9425c-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4jq2kj\" (UID: \"8bd01614-55c6-44bf-b67b-8a6570d9425c\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jq2kj" Dec 02 18:54:48 crc kubenswrapper[4792]: E1202 18:54:48.047320 4792 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 02 18:54:48 crc kubenswrapper[4792]: E1202 18:54:48.047364 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8bd01614-55c6-44bf-b67b-8a6570d9425c-cert podName:8bd01614-55c6-44bf-b67b-8a6570d9425c nodeName:}" failed. No retries permitted until 2025-12-02 18:54:48.547350806 +0000 UTC m=+1119.320243134 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/8bd01614-55c6-44bf-b67b-8a6570d9425c-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4jq2kj" (UID: "8bd01614-55c6-44bf-b67b-8a6570d9425c") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.048014 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-ddpv4" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.066956 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-54d77c4c6-68vgq"] Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.067769 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-54d77c4c6-68vgq" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.069899 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.069926 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-9qvq7" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.070122 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.080216 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-b7kc7" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.081285 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ttkc\" (UniqueName: \"kubernetes.io/projected/dde05ba1-9b55-4f92-9782-d03fed8f26b0-kube-api-access-2ttkc\") pod \"octavia-operator-controller-manager-998648c74-98tg4\" (UID: \"dde05ba1-9b55-4f92-9782-d03fed8f26b0\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-98tg4" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.091471 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gd9nj\" (UniqueName: \"kubernetes.io/projected/8becc537-85f1-4b33-8b6a-1ef3bc550cdd-kube-api-access-gd9nj\") pod \"ovn-operator-controller-manager-b6456fdb6-qrj8t\" (UID: \"8becc537-85f1-4b33-8b6a-1ef3bc550cdd\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-qrj8t" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.097406 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lb59r\" (UniqueName: \"kubernetes.io/projected/8bd01614-55c6-44bf-b67b-8a6570d9425c-kube-api-access-lb59r\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4jq2kj\" (UID: \"8bd01614-55c6-44bf-b67b-8a6570d9425c\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jq2kj" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.105574 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-98tg4" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.121623 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-54d77c4c6-68vgq"] Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.158210 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f2e8a63e-9ce0-4009-b041-46c7f29daa11-metrics-certs\") pod \"openstack-operator-controller-manager-54d77c4c6-68vgq\" (UID: \"f2e8a63e-9ce0-4009-b041-46c7f29daa11\") " pod="openstack-operators/openstack-operator-controller-manager-54d77c4c6-68vgq" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.158281 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tg229\" (UniqueName: \"kubernetes.io/projected/474ebfec-9504-4baa-a320-af5bd167bf33-kube-api-access-tg229\") pod \"telemetry-operator-controller-manager-6cc9d48475-tplrw\" (UID: \"474ebfec-9504-4baa-a320-af5bd167bf33\") " pod="openstack-operators/telemetry-operator-controller-manager-6cc9d48475-tplrw" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.158313 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f2e8a63e-9ce0-4009-b041-46c7f29daa11-webhook-certs\") pod \"openstack-operator-controller-manager-54d77c4c6-68vgq\" (UID: \"f2e8a63e-9ce0-4009-b041-46c7f29daa11\") " pod="openstack-operators/openstack-operator-controller-manager-54d77c4c6-68vgq" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.158347 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hqc4z\" (UniqueName: \"kubernetes.io/projected/ad6c4009-148b-4b91-bd36-4d9bd2a16bed-kube-api-access-hqc4z\") pod 
\"watcher-operator-controller-manager-769dc69bc-6v9cs\" (UID: \"ad6c4009-148b-4b91-bd36-4d9bd2a16bed\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-6v9cs" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.158386 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-llsjd\" (UniqueName: \"kubernetes.io/projected/f2e8a63e-9ce0-4009-b041-46c7f29daa11-kube-api-access-llsjd\") pod \"openstack-operator-controller-manager-54d77c4c6-68vgq\" (UID: \"f2e8a63e-9ce0-4009-b041-46c7f29daa11\") " pod="openstack-operators/openstack-operator-controller-manager-54d77c4c6-68vgq" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.158420 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mh9l4\" (UniqueName: \"kubernetes.io/projected/c305906f-16d5-4e43-9666-299106995d65-kube-api-access-mh9l4\") pod \"placement-operator-controller-manager-78f8948974-kqqtk\" (UID: \"c305906f-16d5-4e43-9666-299106995d65\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-kqqtk" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.158459 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-drn6g\" (UniqueName: \"kubernetes.io/projected/64a3a015-bcba-4079-b30b-47579e9a7513-kube-api-access-drn6g\") pod \"swift-operator-controller-manager-5f8c65bbfc-cfpd2\" (UID: \"64a3a015-bcba-4079-b30b-47579e9a7513\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-cfpd2" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.158529 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45rbw\" (UniqueName: \"kubernetes.io/projected/746154e5-b7a7-4ce9-b0db-4c88c998ccac-kube-api-access-45rbw\") pod \"test-operator-controller-manager-5854674fcc-d922f\" (UID: \"746154e5-b7a7-4ce9-b0db-4c88c998ccac\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-d922f" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.171561 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-qrj8t" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.179407 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-drn6g\" (UniqueName: \"kubernetes.io/projected/64a3a015-bcba-4079-b30b-47579e9a7513-kube-api-access-drn6g\") pod \"swift-operator-controller-manager-5f8c65bbfc-cfpd2\" (UID: \"64a3a015-bcba-4079-b30b-47579e9a7513\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-cfpd2" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.183937 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mh9l4\" (UniqueName: \"kubernetes.io/projected/c305906f-16d5-4e43-9666-299106995d65-kube-api-access-mh9l4\") pod \"placement-operator-controller-manager-78f8948974-kqqtk\" (UID: \"c305906f-16d5-4e43-9666-299106995d65\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-kqqtk" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.184647 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hqc4z\" (UniqueName: \"kubernetes.io/projected/ad6c4009-148b-4b91-bd36-4d9bd2a16bed-kube-api-access-hqc4z\") pod \"watcher-operator-controller-manager-769dc69bc-6v9cs\" (UID: \"ad6c4009-148b-4b91-bd36-4d9bd2a16bed\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-6v9cs" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.185293 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45rbw\" (UniqueName: \"kubernetes.io/projected/746154e5-b7a7-4ce9-b0db-4c88c998ccac-kube-api-access-45rbw\") pod \"test-operator-controller-manager-5854674fcc-d922f\" (UID: \"746154e5-b7a7-4ce9-b0db-4c88c998ccac\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-d922f" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.189860 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tg229\" (UniqueName: \"kubernetes.io/projected/474ebfec-9504-4baa-a320-af5bd167bf33-kube-api-access-tg229\") pod \"telemetry-operator-controller-manager-6cc9d48475-tplrw\" (UID: \"474ebfec-9504-4baa-a320-af5bd167bf33\") " pod="openstack-operators/telemetry-operator-controller-manager-6cc9d48475-tplrw" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.196502 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-xn9ms"] Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.197855 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-kqqtk" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.198249 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-xn9ms" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.203197 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-ndtcb" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.203364 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-xn9ms"] Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.233945 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-cfpd2" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.239269 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-89kmd"] Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.266246 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f2e8a63e-9ce0-4009-b041-46c7f29daa11-metrics-certs\") pod \"openstack-operator-controller-manager-54d77c4c6-68vgq\" (UID: \"f2e8a63e-9ce0-4009-b041-46c7f29daa11\") " pod="openstack-operators/openstack-operator-controller-manager-54d77c4c6-68vgq" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.266279 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4998553b-ffbc-4684-9756-22885fec1a98-cert\") pod \"infra-operator-controller-manager-57548d458d-l4tvg\" (UID: \"4998553b-ffbc-4684-9756-22885fec1a98\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-l4tvg" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.266305 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f2e8a63e-9ce0-4009-b041-46c7f29daa11-webhook-certs\") pod \"openstack-operator-controller-manager-54d77c4c6-68vgq\" (UID: \"f2e8a63e-9ce0-4009-b041-46c7f29daa11\") " pod="openstack-operators/openstack-operator-controller-manager-54d77c4c6-68vgq" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.266337 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5t5nc\" (UniqueName: \"kubernetes.io/projected/feb0adee-ff46-4603-80f1-a086af7e863c-kube-api-access-5t5nc\") pod \"rabbitmq-cluster-operator-manager-668c99d594-xn9ms\" (UID: \"feb0adee-ff46-4603-80f1-a086af7e863c\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-xn9ms" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.266370 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-llsjd\" (UniqueName: \"kubernetes.io/projected/f2e8a63e-9ce0-4009-b041-46c7f29daa11-kube-api-access-llsjd\") pod \"openstack-operator-controller-manager-54d77c4c6-68vgq\" (UID: \"f2e8a63e-9ce0-4009-b041-46c7f29daa11\") " pod="openstack-operators/openstack-operator-controller-manager-54d77c4c6-68vgq" Dec 02 18:54:48 crc kubenswrapper[4792]: E1202 18:54:48.266749 4792 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 02 18:54:48 crc kubenswrapper[4792]: E1202 18:54:48.266784 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f2e8a63e-9ce0-4009-b041-46c7f29daa11-metrics-certs podName:f2e8a63e-9ce0-4009-b041-46c7f29daa11 nodeName:}" failed. No retries permitted until 2025-12-02 18:54:48.76677166 +0000 UTC m=+1119.539663978 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f2e8a63e-9ce0-4009-b041-46c7f29daa11-metrics-certs") pod "openstack-operator-controller-manager-54d77c4c6-68vgq" (UID: "f2e8a63e-9ce0-4009-b041-46c7f29daa11") : secret "metrics-server-cert" not found Dec 02 18:54:48 crc kubenswrapper[4792]: E1202 18:54:48.266940 4792 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 02 18:54:48 crc kubenswrapper[4792]: E1202 18:54:48.266963 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4998553b-ffbc-4684-9756-22885fec1a98-cert podName:4998553b-ffbc-4684-9756-22885fec1a98 nodeName:}" failed. No retries permitted until 2025-12-02 18:54:49.266953354 +0000 UTC m=+1120.039845682 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/4998553b-ffbc-4684-9756-22885fec1a98-cert") pod "infra-operator-controller-manager-57548d458d-l4tvg" (UID: "4998553b-ffbc-4684-9756-22885fec1a98") : secret "infra-operator-webhook-server-cert" not found Dec 02 18:54:48 crc kubenswrapper[4792]: E1202 18:54:48.266994 4792 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 02 18:54:48 crc kubenswrapper[4792]: E1202 18:54:48.267011 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f2e8a63e-9ce0-4009-b041-46c7f29daa11-webhook-certs podName:f2e8a63e-9ce0-4009-b041-46c7f29daa11 nodeName:}" failed. No retries permitted until 2025-12-02 18:54:48.767004826 +0000 UTC m=+1119.539897144 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/f2e8a63e-9ce0-4009-b041-46c7f29daa11-webhook-certs") pod "openstack-operator-controller-manager-54d77c4c6-68vgq" (UID: "f2e8a63e-9ce0-4009-b041-46c7f29daa11") : secret "webhook-server-cert" not found Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.267131 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-6cc9d48475-tplrw" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.286218 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-llsjd\" (UniqueName: \"kubernetes.io/projected/f2e8a63e-9ce0-4009-b041-46c7f29daa11-kube-api-access-llsjd\") pod \"openstack-operator-controller-manager-54d77c4c6-68vgq\" (UID: \"f2e8a63e-9ce0-4009-b041-46c7f29daa11\") " pod="openstack-operators/openstack-operator-controller-manager-54d77c4c6-68vgq" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.323812 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-d922f" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.367347 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5t5nc\" (UniqueName: \"kubernetes.io/projected/feb0adee-ff46-4603-80f1-a086af7e863c-kube-api-access-5t5nc\") pod \"rabbitmq-cluster-operator-manager-668c99d594-xn9ms\" (UID: \"feb0adee-ff46-4603-80f1-a086af7e863c\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-xn9ms" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.399975 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-6v9cs" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.442312 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5t5nc\" (UniqueName: \"kubernetes.io/projected/feb0adee-ff46-4603-80f1-a086af7e863c-kube-api-access-5t5nc\") pod \"rabbitmq-cluster-operator-manager-668c99d594-xn9ms\" (UID: \"feb0adee-ff46-4603-80f1-a086af7e863c\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-xn9ms" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.532872 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-xn9ms" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.571285 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8bd01614-55c6-44bf-b67b-8a6570d9425c-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4jq2kj\" (UID: \"8bd01614-55c6-44bf-b67b-8a6570d9425c\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jq2kj" Dec 02 18:54:48 crc kubenswrapper[4792]: E1202 18:54:48.571721 4792 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 02 18:54:48 crc kubenswrapper[4792]: E1202 18:54:48.571782 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8bd01614-55c6-44bf-b67b-8a6570d9425c-cert podName:8bd01614-55c6-44bf-b67b-8a6570d9425c nodeName:}" failed. No retries permitted until 2025-12-02 18:54:49.571768602 +0000 UTC m=+1120.344660930 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/8bd01614-55c6-44bf-b67b-8a6570d9425c-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4jq2kj" (UID: "8bd01614-55c6-44bf-b67b-8a6570d9425c") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.638271 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-zpmp6"] Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.673560 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-7qgqw"] Dec 02 18:54:48 crc kubenswrapper[4792]: W1202 18:54:48.701849 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod851b5fce_f6b9_4fef_a80c_e66336c5fa49.slice/crio-b88dc870ffcc6cd9e385cb859602539a2d780ab287f9b36cf5cab21adaa35fd6 WatchSource:0}: Error finding container b88dc870ffcc6cd9e385cb859602539a2d780ab287f9b36cf5cab21adaa35fd6: Status 404 returned error can't find the container with id b88dc870ffcc6cd9e385cb859602539a2d780ab287f9b36cf5cab21adaa35fd6 Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.775453 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f2e8a63e-9ce0-4009-b041-46c7f29daa11-metrics-certs\") pod \"openstack-operator-controller-manager-54d77c4c6-68vgq\" (UID: \"f2e8a63e-9ce0-4009-b041-46c7f29daa11\") " pod="openstack-operators/openstack-operator-controller-manager-54d77c4c6-68vgq" Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.775577 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f2e8a63e-9ce0-4009-b041-46c7f29daa11-webhook-certs\") pod \"openstack-operator-controller-manager-54d77c4c6-68vgq\" (UID: \"f2e8a63e-9ce0-4009-b041-46c7f29daa11\") " pod="openstack-operators/openstack-operator-controller-manager-54d77c4c6-68vgq" Dec 02 18:54:48 crc kubenswrapper[4792]: E1202 18:54:48.775774 4792 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 02 18:54:48 crc kubenswrapper[4792]: E1202 18:54:48.775832 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f2e8a63e-9ce0-4009-b041-46c7f29daa11-webhook-certs podName:f2e8a63e-9ce0-4009-b041-46c7f29daa11 nodeName:}" failed. No retries permitted until 2025-12-02 18:54:49.775819127 +0000 UTC m=+1120.548711455 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/f2e8a63e-9ce0-4009-b041-46c7f29daa11-webhook-certs") pod "openstack-operator-controller-manager-54d77c4c6-68vgq" (UID: "f2e8a63e-9ce0-4009-b041-46c7f29daa11") : secret "webhook-server-cert" not found Dec 02 18:54:48 crc kubenswrapper[4792]: E1202 18:54:48.776334 4792 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 02 18:54:48 crc kubenswrapper[4792]: E1202 18:54:48.776388 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f2e8a63e-9ce0-4009-b041-46c7f29daa11-metrics-certs podName:f2e8a63e-9ce0-4009-b041-46c7f29daa11 nodeName:}" failed. No retries permitted until 2025-12-02 18:54:49.77636138 +0000 UTC m=+1120.549253708 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f2e8a63e-9ce0-4009-b041-46c7f29daa11-metrics-certs") pod "openstack-operator-controller-manager-54d77c4c6-68vgq" (UID: "f2e8a63e-9ce0-4009-b041-46c7f29daa11") : secret "metrics-server-cert" not found Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.989787 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xbg4j"] Dec 02 18:54:48 crc kubenswrapper[4792]: I1202 18:54:48.994617 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-xrbqg"] Dec 02 18:54:49 crc kubenswrapper[4792]: I1202 18:54:49.025742 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-fmggq"] Dec 02 18:54:49 crc kubenswrapper[4792]: W1202 18:54:49.029739 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9f1a320f_5255_4fc4_b973_39ce2aee3bae.slice/crio-218b83c45105e7df54eb9d4c2e7fec627b189d763db356a4638a9f590992ac0a WatchSource:0}: Error finding container 218b83c45105e7df54eb9d4c2e7fec627b189d763db356a4638a9f590992ac0a: Status 404 returned error can't find the container with id 218b83c45105e7df54eb9d4c2e7fec627b189d763db356a4638a9f590992ac0a Dec 02 18:54:49 crc kubenswrapper[4792]: I1202 18:54:49.198887 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-fmggq" event={"ID":"9f1a320f-5255-4fc4-b973-39ce2aee3bae","Type":"ContainerStarted","Data":"218b83c45105e7df54eb9d4c2e7fec627b189d763db356a4638a9f590992ac0a"} Dec 02 18:54:49 crc kubenswrapper[4792]: I1202 18:54:49.199877 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xbg4j" event={"ID":"77a52e44-0fcb-4b97-93de-0d26a6901c37","Type":"ContainerStarted","Data":"f6a881a6054621f87aa59847978b165ebd0f8672092f77e785ad48c665c65c0d"} Dec 02 18:54:49 crc kubenswrapper[4792]: I1202 18:54:49.201095 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-zpmp6" event={"ID":"851b5fce-f6b9-4fef-a80c-e66336c5fa49","Type":"ContainerStarted","Data":"b88dc870ffcc6cd9e385cb859602539a2d780ab287f9b36cf5cab21adaa35fd6"} Dec 02 18:54:49 crc kubenswrapper[4792]: I1202 18:54:49.202653 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-7qgqw" event={"ID":"fce25a63-01bd-458a-9567-f08f710abec9","Type":"ContainerStarted","Data":"5723f924dbc7f27f4b5c442d28bf2c74117d0765f6719f53e2c7fa8a17c2ad0f"} Dec 02 18:54:49 crc kubenswrapper[4792]: I1202 18:54:49.206060 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-89kmd" event={"ID":"daa6dcd6-39c7-44fc-9754-7de254748ec3","Type":"ContainerStarted","Data":"d071e1f90e9da3e935aa49ee6d63f5ae913874e649ba212f4f24cf0efe3aa77a"} Dec 02 18:54:49 crc kubenswrapper[4792]: I1202 18:54:49.207341 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-xrbqg" event={"ID":"9c2e4c8c-6f7d-41b0-bc43-19e0b14297f9","Type":"ContainerStarted","Data":"b84307ea7392fce19798bea04391a6b76184ac1a6204442b22a85bbb1f4932ad"} Dec 02 18:54:49 crc kubenswrapper[4792]: 
I1202 18:54:49.283048 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4998553b-ffbc-4684-9756-22885fec1a98-cert\") pod \"infra-operator-controller-manager-57548d458d-l4tvg\" (UID: \"4998553b-ffbc-4684-9756-22885fec1a98\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-l4tvg" Dec 02 18:54:49 crc kubenswrapper[4792]: E1202 18:54:49.284193 4792 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 02 18:54:49 crc kubenswrapper[4792]: E1202 18:54:49.284276 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4998553b-ffbc-4684-9756-22885fec1a98-cert podName:4998553b-ffbc-4684-9756-22885fec1a98 nodeName:}" failed. No retries permitted until 2025-12-02 18:54:51.284258209 +0000 UTC m=+1122.057150537 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/4998553b-ffbc-4684-9756-22885fec1a98-cert") pod "infra-operator-controller-manager-57548d458d-l4tvg" (UID: "4998553b-ffbc-4684-9756-22885fec1a98") : secret "infra-operator-webhook-server-cert" not found Dec 02 18:54:49 crc kubenswrapper[4792]: I1202 18:54:49.317417 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-ksx7c"] Dec 02 18:54:49 crc kubenswrapper[4792]: I1202 18:54:49.332342 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-zm7n7"] Dec 02 18:54:49 crc kubenswrapper[4792]: I1202 18:54:49.353214 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-cfpd2"] Dec 02 18:54:49 crc kubenswrapper[4792]: W1202 18:54:49.361444 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2abae0c8_1cd8_4329_a4dc_678124e1195a.slice/crio-88fb9b7e8a25880c38d4e83815fe8e00772e30cfac1511775b49bf87c18ff5c2 WatchSource:0}: Error finding container 88fb9b7e8a25880c38d4e83815fe8e00772e30cfac1511775b49bf87c18ff5c2: Status 404 returned error can't find the container with id 88fb9b7e8a25880c38d4e83815fe8e00772e30cfac1511775b49bf87c18ff5c2 Dec 02 18:54:49 crc kubenswrapper[4792]: I1202 18:54:49.367880 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-6cc9d48475-tplrw"] Dec 02 18:54:49 crc kubenswrapper[4792]: I1202 18:54:49.377254 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-92mth"] Dec 02 18:54:49 crc kubenswrapper[4792]: W1202 18:54:49.388871 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod474ebfec_9504_4baa_a320_af5bd167bf33.slice/crio-0805c6950ddfc9216e006e12fd87faad35c843ca9bec32b4f4d789e61c6766ab WatchSource:0}: Error finding container 0805c6950ddfc9216e006e12fd87faad35c843ca9bec32b4f4d789e61c6766ab: Status 404 returned error can't find the container with id 0805c6950ddfc9216e006e12fd87faad35c843ca9bec32b4f4d789e61c6766ab Dec 02 18:54:49 crc kubenswrapper[4792]: I1202 18:54:49.389350 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-g8nwt"] Dec 02 18:54:49 crc kubenswrapper[4792]: W1202 
18:54:49.395833 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod15e497ba_5375_4926_80f5_f46940572f8f.slice/crio-7d91ddab17773b9c6847c6bf58e22bd0adef6d8d3e8e98daa51d06408106789e WatchSource:0}: Error finding container 7d91ddab17773b9c6847c6bf58e22bd0adef6d8d3e8e98daa51d06408106789e: Status 404 returned error can't find the container with id 7d91ddab17773b9c6847c6bf58e22bd0adef6d8d3e8e98daa51d06408106789e Dec 02 18:54:49 crc kubenswrapper[4792]: I1202 18:54:49.398145 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-98tg4"] Dec 02 18:54:49 crc kubenswrapper[4792]: I1202 18:54:49.409105 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-kqqtk"] Dec 02 18:54:49 crc kubenswrapper[4792]: I1202 18:54:49.415453 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-qrj8t"] Dec 02 18:54:49 crc kubenswrapper[4792]: E1202 18:54:49.415690 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mh9l4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
placement-operator-controller-manager-78f8948974-kqqtk_openstack-operators(c305906f-16d5-4e43-9666-299106995d65): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 02 18:54:49 crc kubenswrapper[4792]: E1202 18:54:49.417683 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mh9l4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-78f8948974-kqqtk_openstack-operators(c305906f-16d5-4e43-9666-299106995d65): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 02 18:54:49 crc kubenswrapper[4792]: E1202 18:54:49.421780 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/placement-operator-controller-manager-78f8948974-kqqtk" podUID="c305906f-16d5-4e43-9666-299106995d65" Dec 02 18:54:49 crc kubenswrapper[4792]: I1202 18:54:49.425178 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-b7kc7"] Dec 02 18:54:49 crc kubenswrapper[4792]: I1202 18:54:49.430775 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-6v9cs"] Dec 02 18:54:49 crc kubenswrapper[4792]: E1202 18:54:49.438998 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:38.102.83.213:5001/openstack-k8s-operators/telemetry-operator:873139a69664212bae113aa86760d7345f0ff121,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} 
BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-tg229,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-6cc9d48475-tplrw_openstack-operators(474ebfec-9504-4baa-a320-af5bd167bf33): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 02 18:54:49 crc kubenswrapper[4792]: E1202 18:54:49.439831 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hqc4z,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-769dc69bc-6v9cs_openstack-operators(ad6c4009-148b-4b91-bd36-4d9bd2a16bed): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 02 18:54:49 crc kubenswrapper[4792]: E1202 18:54:49.440207 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-45rbw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-d922f_openstack-operators(746154e5-b7a7-4ce9-b0db-4c88c998ccac): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 02 18:54:49 crc 
kubenswrapper[4792]: I1202 18:54:49.443256 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-ddpv4"] Dec 02 18:54:49 crc kubenswrapper[4792]: W1202 18:54:49.445245 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfeb0adee_ff46_4603_80f1_a086af7e863c.slice/crio-55b89ff9435edc2a792a3195573247c1c60e05cfc16c6d2365fdca6b0e274784 WatchSource:0}: Error finding container 55b89ff9435edc2a792a3195573247c1c60e05cfc16c6d2365fdca6b0e274784: Status 404 returned error can't find the container with id 55b89ff9435edc2a792a3195573247c1c60e05cfc16c6d2365fdca6b0e274784 Dec 02 18:54:49 crc kubenswrapper[4792]: E1202 18:54:49.445401 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-tg229,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-6cc9d48475-tplrw_openstack-operators(474ebfec-9504-4baa-a320-af5bd167bf33): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 02 18:54:49 crc kubenswrapper[4792]: E1202 18:54:49.445402 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-45rbw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-d922f_openstack-operators(746154e5-b7a7-4ce9-b0db-4c88c998ccac): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 02 18:54:49 crc kubenswrapper[4792]: E1202 18:54:49.446997 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-d922f" podUID="746154e5-b7a7-4ce9-b0db-4c88c998ccac" Dec 02 18:54:49 crc kubenswrapper[4792]: E1202 18:54:49.447129 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/telemetry-operator-controller-manager-6cc9d48475-tplrw" podUID="474ebfec-9504-4baa-a320-af5bd167bf33" Dec 02 18:54:49 crc kubenswrapper[4792]: E1202 18:54:49.447849 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hqc4z,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
watcher-operator-controller-manager-769dc69bc-6v9cs_openstack-operators(ad6c4009-148b-4b91-bd36-4d9bd2a16bed): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 02 18:54:49 crc kubenswrapper[4792]: E1202 18:54:49.448988 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-6v9cs" podUID="ad6c4009-148b-4b91-bd36-4d9bd2a16bed" Dec 02 18:54:49 crc kubenswrapper[4792]: I1202 18:54:49.449727 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-d922f"] Dec 02 18:54:49 crc kubenswrapper[4792]: E1202 18:54:49.452600 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/horizon-operator@sha256:9e847f4dbdea19ab997f32a02b3680a9bd966f9c705911645c3866a19fda9ea5,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-8jsbd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-operator-controller-manager-68c6d99b8f-ddpv4_openstack-operators(a725d6d0-4642-4316-9e67-e002d58f7117): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 02 18:54:49 crc kubenswrapper[4792]: E1202 18:54:49.453441 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5t5nc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-xn9ms_openstack-operators(feb0adee-ff46-4603-80f1-a086af7e863c): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 02 18:54:49 crc kubenswrapper[4792]: E1202 18:54:49.454631 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-xn9ms" podUID="feb0adee-ff46-4603-80f1-a086af7e863c" Dec 02 18:54:49 crc kubenswrapper[4792]: E1202 18:54:49.456493 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-8jsbd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-operator-controller-manager-68c6d99b8f-ddpv4_openstack-operators(a725d6d0-4642-4316-9e67-e002d58f7117): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 02 18:54:49 crc kubenswrapper[4792]: E1202 18:54:49.457746 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-ddpv4" podUID="a725d6d0-4642-4316-9e67-e002d58f7117" Dec 02 18:54:49 crc kubenswrapper[4792]: I1202 18:54:49.461953 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-xn9ms"] Dec 02 18:54:49 crc kubenswrapper[4792]: I1202 18:54:49.594079 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8bd01614-55c6-44bf-b67b-8a6570d9425c-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4jq2kj\" (UID: \"8bd01614-55c6-44bf-b67b-8a6570d9425c\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jq2kj" Dec 02 18:54:49 crc kubenswrapper[4792]: E1202 18:54:49.595194 4792 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 02 18:54:49 crc kubenswrapper[4792]: E1202 18:54:49.595246 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8bd01614-55c6-44bf-b67b-8a6570d9425c-cert podName:8bd01614-55c6-44bf-b67b-8a6570d9425c nodeName:}" failed. No retries permitted until 2025-12-02 18:54:51.595230775 +0000 UTC m=+1122.368123103 (durationBeforeRetry 2s). 
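
A note on the burst of "ErrImagePull: pull QPS exceeded" records above: the kubelet rate-limits image pulls client-side with a token bucket (KubeletConfiguration registryPullQPS / registryBurst; the long-standing defaults are 5 pulls/s with a burst of 10), so when ~18 operator pods request images at once, everything past the burst fails immediately and falls into backoff. The following is a minimal sketch of that token-bucket behavior, assuming the default 5/10 values; the pull count is illustrative, not read from this log.

```go
// Sketch of the kubelet's client-side pull rate limit as a token bucket.
// QPS/burst values are the assumed kubelet defaults (registryPullQPS=5,
// registryBurst=10), not values confirmed from this node's config.
package main

import (
	"fmt"

	"golang.org/x/time/rate"
)

func main() {
	limiter := rate.NewLimiter(rate.Limit(5), 10) // 5 pulls/s, burst of 10

	// ~18 operator pods asking for images at the same instant, as above:
	// roughly the first ten consume the burst, the rest are rejected the
	// same way the kubelet reports "pull QPS exceeded".
	for i := 1; i <= 18; i++ {
		if limiter.Allow() {
			fmt.Printf("pull %2d: allowed\n", i)
		} else {
			fmt.Printf("pull %2d: pull QPS exceeded\n", i)
		}
	}
}
```
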
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/8bd01614-55c6-44bf-b67b-8a6570d9425c-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4jq2kj" (UID: "8bd01614-55c6-44bf-b67b-8a6570d9425c") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 02 18:54:49 crc kubenswrapper[4792]: I1202 18:54:49.798457 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f2e8a63e-9ce0-4009-b041-46c7f29daa11-metrics-certs\") pod \"openstack-operator-controller-manager-54d77c4c6-68vgq\" (UID: \"f2e8a63e-9ce0-4009-b041-46c7f29daa11\") " pod="openstack-operators/openstack-operator-controller-manager-54d77c4c6-68vgq" Dec 02 18:54:49 crc kubenswrapper[4792]: E1202 18:54:49.799124 4792 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 02 18:54:49 crc kubenswrapper[4792]: E1202 18:54:49.799182 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f2e8a63e-9ce0-4009-b041-46c7f29daa11-metrics-certs podName:f2e8a63e-9ce0-4009-b041-46c7f29daa11 nodeName:}" failed. No retries permitted until 2025-12-02 18:54:51.799165026 +0000 UTC m=+1122.572057364 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f2e8a63e-9ce0-4009-b041-46c7f29daa11-metrics-certs") pod "openstack-operator-controller-manager-54d77c4c6-68vgq" (UID: "f2e8a63e-9ce0-4009-b041-46c7f29daa11") : secret "metrics-server-cert" not found Dec 02 18:54:49 crc kubenswrapper[4792]: I1202 18:54:49.799505 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f2e8a63e-9ce0-4009-b041-46c7f29daa11-webhook-certs\") pod \"openstack-operator-controller-manager-54d77c4c6-68vgq\" (UID: \"f2e8a63e-9ce0-4009-b041-46c7f29daa11\") " pod="openstack-operators/openstack-operator-controller-manager-54d77c4c6-68vgq" Dec 02 18:54:49 crc kubenswrapper[4792]: E1202 18:54:49.799643 4792 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 02 18:54:49 crc kubenswrapper[4792]: E1202 18:54:49.799674 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f2e8a63e-9ce0-4009-b041-46c7f29daa11-webhook-certs podName:f2e8a63e-9ce0-4009-b041-46c7f29daa11 nodeName:}" failed. No retries permitted until 2025-12-02 18:54:51.799665689 +0000 UTC m=+1122.572558017 (durationBeforeRetry 2s). 
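
The MountVolume.SetUp failures here all reduce to Secrets that do not exist yet (webhook/metrics serving certs, presumably still being issued); the kubelet simply retries until they appear. A small client-go sketch for checking those Secrets from outside the node — the namespace and secret names are taken from the records above, the kubeconfig path is an assumption.

```go
// Sketch: check for the Secrets the kubelet is waiting on. Secret names
// and namespace are copied from the log; the kubeconfig path is assumed.
package main

import (
	"context"
	"fmt"

	apierrors "k8s.io/apimachinery/pkg/api/errors"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", "/root/.kube/config") // assumed path
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	for _, name := range []string{
		"openstack-baremetal-operator-webhook-server-cert",
		"infra-operator-webhook-server-cert",
		"metrics-server-cert",
		"webhook-server-cert",
	} {
		_, err := cs.CoreV1().Secrets("openstack-operators").Get(context.TODO(), name, metav1.GetOptions{})
		switch {
		case apierrors.IsNotFound(err):
			fmt.Printf("%s: still missing (kubelet keeps retrying the mount)\n", name)
		case err != nil:
			fmt.Printf("%s: lookup error: %v\n", name, err)
		default:
			fmt.Printf("%s: present\n", name)
		}
	}
}
```

As the later records at 18:55:03 show, the mounts succeed on their own once the secrets exist; no kubelet-side action is needed.
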
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/f2e8a63e-9ce0-4009-b041-46c7f29daa11-webhook-certs") pod "openstack-operator-controller-manager-54d77c4c6-68vgq" (UID: "f2e8a63e-9ce0-4009-b041-46c7f29daa11") : secret "webhook-server-cert" not found Dec 02 18:54:50 crc kubenswrapper[4792]: I1202 18:54:50.215371 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-6cc9d48475-tplrw" event={"ID":"474ebfec-9504-4baa-a320-af5bd167bf33","Type":"ContainerStarted","Data":"0805c6950ddfc9216e006e12fd87faad35c843ca9bec32b4f4d789e61c6766ab"} Dec 02 18:54:50 crc kubenswrapper[4792]: E1202 18:54:50.217451 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.213:5001/openstack-k8s-operators/telemetry-operator:873139a69664212bae113aa86760d7345f0ff121\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-6cc9d48475-tplrw" podUID="474ebfec-9504-4baa-a320-af5bd167bf33" Dec 02 18:54:50 crc kubenswrapper[4792]: I1202 18:54:50.221540 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-xn9ms" event={"ID":"feb0adee-ff46-4603-80f1-a086af7e863c","Type":"ContainerStarted","Data":"55b89ff9435edc2a792a3195573247c1c60e05cfc16c6d2365fdca6b0e274784"} Dec 02 18:54:50 crc kubenswrapper[4792]: E1202 18:54:50.223860 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-xn9ms" podUID="feb0adee-ff46-4603-80f1-a086af7e863c" Dec 02 18:54:50 crc kubenswrapper[4792]: I1202 18:54:50.232568 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-zm7n7" event={"ID":"2abae0c8-1cd8-4329-a4dc-678124e1195a","Type":"ContainerStarted","Data":"88fb9b7e8a25880c38d4e83815fe8e00772e30cfac1511775b49bf87c18ff5c2"} Dec 02 18:54:50 crc kubenswrapper[4792]: I1202 18:54:50.240026 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-g8nwt" event={"ID":"15e497ba-5375-4926-80f5-f46940572f8f","Type":"ContainerStarted","Data":"7d91ddab17773b9c6847c6bf58e22bd0adef6d8d3e8e98daa51d06408106789e"} Dec 02 18:54:50 crc kubenswrapper[4792]: I1202 18:54:50.241715 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-6v9cs" event={"ID":"ad6c4009-148b-4b91-bd36-4d9bd2a16bed","Type":"ContainerStarted","Data":"f84245db0ff7e87aa2d8c23fea2988cbdfb139b34de832c335979ab2c78f3c9f"} Dec 02 18:54:50 crc kubenswrapper[4792]: E1202 18:54:50.244383 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with 
ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-6v9cs" podUID="ad6c4009-148b-4b91-bd36-4d9bd2a16bed" Dec 02 18:54:50 crc kubenswrapper[4792]: I1202 18:54:50.247821 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-98tg4" event={"ID":"dde05ba1-9b55-4f92-9782-d03fed8f26b0","Type":"ContainerStarted","Data":"1dde3e4ff8dd507a67b142b3c6bccae082f9c60ff29234171cca05d9ac7d7b80"} Dec 02 18:54:50 crc kubenswrapper[4792]: I1202 18:54:50.256125 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-d922f" event={"ID":"746154e5-b7a7-4ce9-b0db-4c88c998ccac","Type":"ContainerStarted","Data":"c316528400055c715691b770ce19b761deb8f3d894a46c928303243610bdfbca"} Dec 02 18:54:50 crc kubenswrapper[4792]: I1202 18:54:50.258573 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-b7kc7" event={"ID":"ef4ea028-2f42-4560-aad7-94553ba2d3d4","Type":"ContainerStarted","Data":"ef73b82db2a9a7d66011ce2389268746c78e71d4002fefc0814b48729fac5b13"} Dec 02 18:54:50 crc kubenswrapper[4792]: I1202 18:54:50.260031 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-ksx7c" event={"ID":"588c52cc-05c0-438d-bb0f-80bc1236d8cc","Type":"ContainerStarted","Data":"e09e53be3834eb148b5aad95d672f5b7c3ed01990d8c3d03c1ba0f767c97a9f6"} Dec 02 18:54:50 crc kubenswrapper[4792]: E1202 18:54:50.262628 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-d922f" podUID="746154e5-b7a7-4ce9-b0db-4c88c998ccac" Dec 02 18:54:50 crc kubenswrapper[4792]: I1202 18:54:50.268185 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-ddpv4" event={"ID":"a725d6d0-4642-4316-9e67-e002d58f7117","Type":"ContainerStarted","Data":"f8aaf2f48bc2d2e13c3cb3574a02092c813a6f1fdf9a2d0f4f27a52b47d4d500"} Dec 02 18:54:50 crc kubenswrapper[4792]: I1202 18:54:50.269469 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-qrj8t" event={"ID":"8becc537-85f1-4b33-8b6a-1ef3bc550cdd","Type":"ContainerStarted","Data":"6268f3954084fbea26e00e49a290d75f8742b5b878ef15e4e214ac533415f70d"} Dec 02 18:54:50 crc kubenswrapper[4792]: I1202 18:54:50.272476 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-92mth" event={"ID":"e3a43b00-c682-4d04-9996-ceb79a245a18","Type":"ContainerStarted","Data":"9784f6be91135f8464b54c5692fcc878963e1ddd186e6cd426e7b4e72845c9e0"} Dec 02 18:54:50 crc kubenswrapper[4792]: E1202 18:54:50.273464 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/horizon-operator@sha256:9e847f4dbdea19ab997f32a02b3680a9bd966f9c705911645c3866a19fda9ea5\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-ddpv4" podUID="a725d6d0-4642-4316-9e67-e002d58f7117" Dec 02 18:54:50 crc kubenswrapper[4792]: I1202 18:54:50.273806 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-kqqtk" event={"ID":"c305906f-16d5-4e43-9666-299106995d65","Type":"ContainerStarted","Data":"3330ffaf52730d1294ccc6e3fb64f7f45e3ed15ac86b8ed2446f434fae0af4fd"} Dec 02 18:54:50 crc kubenswrapper[4792]: I1202 18:54:50.277979 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-cfpd2" event={"ID":"64a3a015-bcba-4079-b30b-47579e9a7513","Type":"ContainerStarted","Data":"747747cc53fe95d66ef09e251c8fa7b755d2fd33e35ee1dd88f4a39923f79ab5"} Dec 02 18:54:50 crc kubenswrapper[4792]: E1202 18:54:50.282449 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/placement-operator-controller-manager-78f8948974-kqqtk" podUID="c305906f-16d5-4e43-9666-299106995d65" Dec 02 18:54:51 crc kubenswrapper[4792]: E1202 18:54:51.290074 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-d922f" podUID="746154e5-b7a7-4ce9-b0db-4c88c998ccac" Dec 02 18:54:51 crc kubenswrapper[4792]: E1202 18:54:51.290073 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.213:5001/openstack-k8s-operators/telemetry-operator:873139a69664212bae113aa86760d7345f0ff121\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-6cc9d48475-tplrw" podUID="474ebfec-9504-4baa-a320-af5bd167bf33" Dec 02 18:54:51 crc kubenswrapper[4792]: E1202 18:54:51.290597 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-6v9cs" podUID="ad6c4009-148b-4b91-bd36-4d9bd2a16bed" Dec 02 18:54:51 crc kubenswrapper[4792]: E1202 18:54:51.291006 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/horizon-operator@sha256:9e847f4dbdea19ab997f32a02b3680a9bd966f9c705911645c3866a19fda9ea5\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-ddpv4" podUID="a725d6d0-4642-4316-9e67-e002d58f7117" Dec 02 18:54:51 crc kubenswrapper[4792]: E1202 18:54:51.291256 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/placement-operator-controller-manager-78f8948974-kqqtk" podUID="c305906f-16d5-4e43-9666-299106995d65" Dec 02 18:54:51 crc kubenswrapper[4792]: E1202 18:54:51.296797 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-xn9ms" podUID="feb0adee-ff46-4603-80f1-a086af7e863c" Dec 02 18:54:51 crc kubenswrapper[4792]: I1202 18:54:51.337038 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4998553b-ffbc-4684-9756-22885fec1a98-cert\") pod \"infra-operator-controller-manager-57548d458d-l4tvg\" (UID: \"4998553b-ffbc-4684-9756-22885fec1a98\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-l4tvg" Dec 02 18:54:51 crc kubenswrapper[4792]: E1202 18:54:51.337861 4792 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 02 18:54:51 crc kubenswrapper[4792]: E1202 18:54:51.337900 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4998553b-ffbc-4684-9756-22885fec1a98-cert podName:4998553b-ffbc-4684-9756-22885fec1a98 nodeName:}" failed. No retries permitted until 2025-12-02 18:54:55.337889314 +0000 UTC m=+1126.110781642 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/4998553b-ffbc-4684-9756-22885fec1a98-cert") pod "infra-operator-controller-manager-57548d458d-l4tvg" (UID: "4998553b-ffbc-4684-9756-22885fec1a98") : secret "infra-operator-webhook-server-cert" not found Dec 02 18:54:51 crc kubenswrapper[4792]: I1202 18:54:51.643220 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8bd01614-55c6-44bf-b67b-8a6570d9425c-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4jq2kj\" (UID: \"8bd01614-55c6-44bf-b67b-8a6570d9425c\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jq2kj" Dec 02 18:54:51 crc kubenswrapper[4792]: E1202 18:54:51.643406 4792 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 02 18:54:51 crc kubenswrapper[4792]: E1202 18:54:51.643465 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8bd01614-55c6-44bf-b67b-8a6570d9425c-cert podName:8bd01614-55c6-44bf-b67b-8a6570d9425c nodeName:}" failed. No retries permitted until 2025-12-02 18:54:55.643446762 +0000 UTC m=+1126.416339090 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/8bd01614-55c6-44bf-b67b-8a6570d9425c-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4jq2kj" (UID: "8bd01614-55c6-44bf-b67b-8a6570d9425c") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 02 18:54:51 crc kubenswrapper[4792]: I1202 18:54:51.845065 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f2e8a63e-9ce0-4009-b041-46c7f29daa11-metrics-certs\") pod \"openstack-operator-controller-manager-54d77c4c6-68vgq\" (UID: \"f2e8a63e-9ce0-4009-b041-46c7f29daa11\") " pod="openstack-operators/openstack-operator-controller-manager-54d77c4c6-68vgq" Dec 02 18:54:51 crc kubenswrapper[4792]: I1202 18:54:51.845140 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f2e8a63e-9ce0-4009-b041-46c7f29daa11-webhook-certs\") pod \"openstack-operator-controller-manager-54d77c4c6-68vgq\" (UID: \"f2e8a63e-9ce0-4009-b041-46c7f29daa11\") " pod="openstack-operators/openstack-operator-controller-manager-54d77c4c6-68vgq" Dec 02 18:54:51 crc kubenswrapper[4792]: E1202 18:54:51.845373 4792 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 02 18:54:51 crc kubenswrapper[4792]: E1202 18:54:51.845439 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f2e8a63e-9ce0-4009-b041-46c7f29daa11-webhook-certs podName:f2e8a63e-9ce0-4009-b041-46c7f29daa11 nodeName:}" failed. No retries permitted until 2025-12-02 18:54:55.845406203 +0000 UTC m=+1126.618298521 (durationBeforeRetry 4s). 
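
Worth noticing the retry cadence in these mount failures: durationBeforeRetry goes 2s, then 4s, then 8s — the volume reconciler doubles the delay per failed attempt up to a cap on the order of minutes. A tiny sketch of that doubling; the initial delay and cap are assumptions chosen to match the observed cadence, not values read from kubelet source.

```go
// Sketch of the doubling retry delay visible in the log
// (durationBeforeRetry 2s -> 4s -> 8s ...). Initial delay and the cap
// are illustrative assumptions.
package main

import (
	"fmt"
	"time"
)

func main() {
	delay := 2 * time.Second   // first observed durationBeforeRetry
	maxDelay := 2 * time.Minute // assumed upper bound
	for attempt := 1; attempt <= 8; attempt++ {
		fmt.Printf("attempt %d: no retries permitted for %v\n", attempt, delay)
		delay *= 2
		if delay > maxDelay {
			delay = maxDelay
		}
	}
}
```
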
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/f2e8a63e-9ce0-4009-b041-46c7f29daa11-webhook-certs") pod "openstack-operator-controller-manager-54d77c4c6-68vgq" (UID: "f2e8a63e-9ce0-4009-b041-46c7f29daa11") : secret "webhook-server-cert" not found Dec 02 18:54:51 crc kubenswrapper[4792]: E1202 18:54:51.846009 4792 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 02 18:54:51 crc kubenswrapper[4792]: E1202 18:54:51.846247 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f2e8a63e-9ce0-4009-b041-46c7f29daa11-metrics-certs podName:f2e8a63e-9ce0-4009-b041-46c7f29daa11 nodeName:}" failed. No retries permitted until 2025-12-02 18:54:55.846237394 +0000 UTC m=+1126.619129722 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f2e8a63e-9ce0-4009-b041-46c7f29daa11-metrics-certs") pod "openstack-operator-controller-manager-54d77c4c6-68vgq" (UID: "f2e8a63e-9ce0-4009-b041-46c7f29daa11") : secret "metrics-server-cert" not found Dec 02 18:54:55 crc kubenswrapper[4792]: I1202 18:54:55.410029 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4998553b-ffbc-4684-9756-22885fec1a98-cert\") pod \"infra-operator-controller-manager-57548d458d-l4tvg\" (UID: \"4998553b-ffbc-4684-9756-22885fec1a98\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-l4tvg" Dec 02 18:54:55 crc kubenswrapper[4792]: E1202 18:54:55.410211 4792 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 02 18:54:55 crc kubenswrapper[4792]: E1202 18:54:55.411388 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4998553b-ffbc-4684-9756-22885fec1a98-cert podName:4998553b-ffbc-4684-9756-22885fec1a98 nodeName:}" failed. No retries permitted until 2025-12-02 18:55:03.411364404 +0000 UTC m=+1134.184256762 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/4998553b-ffbc-4684-9756-22885fec1a98-cert") pod "infra-operator-controller-manager-57548d458d-l4tvg" (UID: "4998553b-ffbc-4684-9756-22885fec1a98") : secret "infra-operator-webhook-server-cert" not found Dec 02 18:54:55 crc kubenswrapper[4792]: I1202 18:54:55.716511 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8bd01614-55c6-44bf-b67b-8a6570d9425c-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4jq2kj\" (UID: \"8bd01614-55c6-44bf-b67b-8a6570d9425c\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jq2kj" Dec 02 18:54:55 crc kubenswrapper[4792]: E1202 18:54:55.716706 4792 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 02 18:54:55 crc kubenswrapper[4792]: E1202 18:54:55.716806 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8bd01614-55c6-44bf-b67b-8a6570d9425c-cert podName:8bd01614-55c6-44bf-b67b-8a6570d9425c nodeName:}" failed. No retries permitted until 2025-12-02 18:55:03.716779408 +0000 UTC m=+1134.489671756 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/8bd01614-55c6-44bf-b67b-8a6570d9425c-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4jq2kj" (UID: "8bd01614-55c6-44bf-b67b-8a6570d9425c") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 02 18:54:55 crc kubenswrapper[4792]: I1202 18:54:55.927001 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f2e8a63e-9ce0-4009-b041-46c7f29daa11-metrics-certs\") pod \"openstack-operator-controller-manager-54d77c4c6-68vgq\" (UID: \"f2e8a63e-9ce0-4009-b041-46c7f29daa11\") " pod="openstack-operators/openstack-operator-controller-manager-54d77c4c6-68vgq" Dec 02 18:54:55 crc kubenswrapper[4792]: I1202 18:54:55.927107 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f2e8a63e-9ce0-4009-b041-46c7f29daa11-webhook-certs\") pod \"openstack-operator-controller-manager-54d77c4c6-68vgq\" (UID: \"f2e8a63e-9ce0-4009-b041-46c7f29daa11\") " pod="openstack-operators/openstack-operator-controller-manager-54d77c4c6-68vgq" Dec 02 18:54:55 crc kubenswrapper[4792]: E1202 18:54:55.927353 4792 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 02 18:54:55 crc kubenswrapper[4792]: E1202 18:54:55.927422 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f2e8a63e-9ce0-4009-b041-46c7f29daa11-webhook-certs podName:f2e8a63e-9ce0-4009-b041-46c7f29daa11 nodeName:}" failed. No retries permitted until 2025-12-02 18:55:03.927400869 +0000 UTC m=+1134.700293227 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/f2e8a63e-9ce0-4009-b041-46c7f29daa11-webhook-certs") pod "openstack-operator-controller-manager-54d77c4c6-68vgq" (UID: "f2e8a63e-9ce0-4009-b041-46c7f29daa11") : secret "webhook-server-cert" not found Dec 02 18:54:55 crc kubenswrapper[4792]: E1202 18:54:55.927985 4792 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 02 18:54:55 crc kubenswrapper[4792]: E1202 18:54:55.928067 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f2e8a63e-9ce0-4009-b041-46c7f29daa11-metrics-certs podName:f2e8a63e-9ce0-4009-b041-46c7f29daa11 nodeName:}" failed. No retries permitted until 2025-12-02 18:55:03.928025345 +0000 UTC m=+1134.700917713 (durationBeforeRetry 8s). 
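
With this many interleaved records, a small tally is the quickest way to see which pods are stuck on which failure mode. The sketch below counts a few error kinds per pod from a kubelet log on stdin; the regex and the error substrings are assumptions fitted to the format seen in this log.

```go
// Sketch: tally error records per pod from a kubelet log on stdin.
// The pod="ns/name" pattern and error substrings are fitted to this
// log's format, not to any guaranteed kubelet contract.
package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
	"strings"
)

func main() {
	podRe := regexp.MustCompile(`pod="([^"]+)"`)
	kinds := []string{"pull QPS exceeded", "ImagePullBackOff", "MountVolume.SetUp failed"}
	counts := map[string]map[string]int{}

	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 1<<20), 1<<20) // single records here run to several KB
	for sc.Scan() {
		line := sc.Text()
		pod := "(no pod= field)"
		if m := podRe.FindStringSubmatch(line); m != nil {
			pod = m[1]
		}
		for _, k := range kinds {
			if strings.Contains(line, k) {
				if counts[pod] == nil {
					counts[pod] = map[string]int{}
				}
				counts[pod][k]++
			}
		}
	}
	if err := sc.Err(); err != nil {
		fmt.Fprintln(os.Stderr, "scan:", err)
	}
	for pod, byKind := range counts {
		for k, n := range byKind {
			fmt.Printf("%-75s %-28s %d\n", pod, k, n)
		}
	}
}
```

Run it as, e.g., `go run tally.go < kubelet.log`; on this section it would show the same split the records do — QPS/backoff errors on the operator controller-managers, mount failures on the three pods waiting for certs.
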
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f2e8a63e-9ce0-4009-b041-46c7f29daa11-metrics-certs") pod "openstack-operator-controller-manager-54d77c4c6-68vgq" (UID: "f2e8a63e-9ce0-4009-b041-46c7f29daa11") : secret "metrics-server-cert" not found Dec 02 18:55:01 crc kubenswrapper[4792]: E1202 18:55:01.872031 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/heat-operator@sha256:c4abfc148600dfa85915f3dc911d988ea2335f26cb6b8d749fe79bfe53e5e429" Dec 02 18:55:01 crc kubenswrapper[4792]: E1202 18:55:01.873045 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/heat-operator@sha256:c4abfc148600dfa85915f3dc911d988ea2335f26cb6b8d749fe79bfe53e5e429,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jghrx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-operator-controller-manager-5f64f6f8bb-xbg4j_openstack-operators(77a52e44-0fcb-4b97-93de-0d26a6901c37): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 18:55:03 crc kubenswrapper[4792]: I1202 18:55:03.466384 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4998553b-ffbc-4684-9756-22885fec1a98-cert\") pod \"infra-operator-controller-manager-57548d458d-l4tvg\" (UID: \"4998553b-ffbc-4684-9756-22885fec1a98\") " 
pod="openstack-operators/infra-operator-controller-manager-57548d458d-l4tvg" Dec 02 18:55:03 crc kubenswrapper[4792]: I1202 18:55:03.477989 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4998553b-ffbc-4684-9756-22885fec1a98-cert\") pod \"infra-operator-controller-manager-57548d458d-l4tvg\" (UID: \"4998553b-ffbc-4684-9756-22885fec1a98\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-l4tvg" Dec 02 18:55:03 crc kubenswrapper[4792]: I1202 18:55:03.683129 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-l4tvg" Dec 02 18:55:03 crc kubenswrapper[4792]: I1202 18:55:03.773280 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8bd01614-55c6-44bf-b67b-8a6570d9425c-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4jq2kj\" (UID: \"8bd01614-55c6-44bf-b67b-8a6570d9425c\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jq2kj" Dec 02 18:55:03 crc kubenswrapper[4792]: I1202 18:55:03.780028 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8bd01614-55c6-44bf-b67b-8a6570d9425c-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4jq2kj\" (UID: \"8bd01614-55c6-44bf-b67b-8a6570d9425c\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jq2kj" Dec 02 18:55:03 crc kubenswrapper[4792]: I1202 18:55:03.976964 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f2e8a63e-9ce0-4009-b041-46c7f29daa11-webhook-certs\") pod \"openstack-operator-controller-manager-54d77c4c6-68vgq\" (UID: \"f2e8a63e-9ce0-4009-b041-46c7f29daa11\") " pod="openstack-operators/openstack-operator-controller-manager-54d77c4c6-68vgq" Dec 02 18:55:03 crc kubenswrapper[4792]: I1202 18:55:03.977212 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f2e8a63e-9ce0-4009-b041-46c7f29daa11-metrics-certs\") pod \"openstack-operator-controller-manager-54d77c4c6-68vgq\" (UID: \"f2e8a63e-9ce0-4009-b041-46c7f29daa11\") " pod="openstack-operators/openstack-operator-controller-manager-54d77c4c6-68vgq" Dec 02 18:55:03 crc kubenswrapper[4792]: I1202 18:55:03.983001 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f2e8a63e-9ce0-4009-b041-46c7f29daa11-metrics-certs\") pod \"openstack-operator-controller-manager-54d77c4c6-68vgq\" (UID: \"f2e8a63e-9ce0-4009-b041-46c7f29daa11\") " pod="openstack-operators/openstack-operator-controller-manager-54d77c4c6-68vgq" Dec 02 18:55:03 crc kubenswrapper[4792]: I1202 18:55:03.983032 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f2e8a63e-9ce0-4009-b041-46c7f29daa11-webhook-certs\") pod \"openstack-operator-controller-manager-54d77c4c6-68vgq\" (UID: \"f2e8a63e-9ce0-4009-b041-46c7f29daa11\") " pod="openstack-operators/openstack-operator-controller-manager-54d77c4c6-68vgq" Dec 02 18:55:04 crc kubenswrapper[4792]: I1202 18:55:04.019341 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jq2kj" Dec 02 18:55:04 crc kubenswrapper[4792]: I1202 18:55:04.040289 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-54d77c4c6-68vgq" Dec 02 18:55:08 crc kubenswrapper[4792]: I1202 18:55:08.081924 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 18:55:08 crc kubenswrapper[4792]: I1202 18:55:08.084172 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 18:55:11 crc kubenswrapper[4792]: E1202 18:55:11.469099 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7" Dec 02 18:55:11 crc kubenswrapper[4792]: E1202 18:55:11.469695 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-49b25,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-7765d96ddf-xrbqg_openstack-operators(9c2e4c8c-6f7d-41b0-bc43-19e0b14297f9): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 18:55:11 crc kubenswrapper[4792]: E1202 18:55:11.698510 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670" Dec 02 18:55:11 crc kubenswrapper[4792]: E1202 18:55:11.698723 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wzgd5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-ksx7c_openstack-operators(588c52cc-05c0-438d-bb0f-80bc1236d8cc): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 18:55:12 crc kubenswrapper[4792]: I1202 18:55:12.202344 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jq2kj"] Dec 02 18:55:12 crc kubenswrapper[4792]: I1202 18:55:12.306829 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-54d77c4c6-68vgq"] Dec 02 18:55:12 crc kubenswrapper[4792]: I1202 18:55:12.369830 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-l4tvg"] Dec 02 18:55:12 crc kubenswrapper[4792]: I1202 18:55:12.519818 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-qrj8t" event={"ID":"8becc537-85f1-4b33-8b6a-1ef3bc550cdd","Type":"ContainerStarted","Data":"ea715a50fcf26c1a0a50f779d1a673706fd8c5aadbdbe8057d2280191e22c12d"} Dec 02 18:55:12 crc kubenswrapper[4792]: I1202 18:55:12.534619 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-fmggq" event={"ID":"9f1a320f-5255-4fc4-b973-39ce2aee3bae","Type":"ContainerStarted","Data":"5d5b8e62eab14daedbd357ef08eea437241d39d93c4e13b4b515f56447da504e"} Dec 02 18:55:12 crc kubenswrapper[4792]: I1202 18:55:12.561820 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-92mth" event={"ID":"e3a43b00-c682-4d04-9996-ceb79a245a18","Type":"ContainerStarted","Data":"668b592cf1f879569f09b109c26218e9d5f9377d823f3c98ef48b2b6c68a539a"} Dec 02 18:55:12 crc kubenswrapper[4792]: I1202 18:55:12.565013 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-zpmp6" event={"ID":"851b5fce-f6b9-4fef-a80c-e66336c5fa49","Type":"ContainerStarted","Data":"f49a9a189b0b415216779c24a43dd06f1eca3805360d002682740ff6c9d5c31a"} Dec 02 18:55:12 crc kubenswrapper[4792]: I1202 18:55:12.568810 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-7qgqw" event={"ID":"fce25a63-01bd-458a-9567-f08f710abec9","Type":"ContainerStarted","Data":"265ec4b91e452057b3e2c9d779c891328b728941af0a9b74869913ce2eb37390"} Dec 02 18:55:12 crc kubenswrapper[4792]: I1202 18:55:12.570793 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-89kmd" event={"ID":"daa6dcd6-39c7-44fc-9754-7de254748ec3","Type":"ContainerStarted","Data":"de18349afb46c436a1b951de86929f8ed5a457256d607e342895583144f9bc3a"} Dec 02 18:55:12 crc kubenswrapper[4792]: I1202 18:55:12.579124 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-zm7n7" event={"ID":"2abae0c8-1cd8-4329-a4dc-678124e1195a","Type":"ContainerStarted","Data":"81ed2f17fe62ef6b3c5548d6042591e93e34ee27b5b5ccc880fe3ed1cbaf9759"} Dec 02 18:55:12 crc kubenswrapper[4792]: I1202 18:55:12.583170 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-g8nwt" event={"ID":"15e497ba-5375-4926-80f5-f46940572f8f","Type":"ContainerStarted","Data":"fdd20b0105be3ca993efecc9e4b4694bf6d9540fbb919df0cb9032d167e53832"} Dec 02 18:55:13 crc kubenswrapper[4792]: W1202 18:55:13.505608 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4998553b_ffbc_4684_9756_22885fec1a98.slice/crio-aa8506036dec3bfbcf5c6edca6c0498f20a58688abe4d278bec64719c743c037 WatchSource:0}: Error finding container aa8506036dec3bfbcf5c6edca6c0498f20a58688abe4d278bec64719c743c037: Status 404 returned error can't find the container with id aa8506036dec3bfbcf5c6edca6c0498f20a58688abe4d278bec64719c743c037 Dec 02 18:55:13 crc kubenswrapper[4792]: I1202 18:55:13.591742 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-54d77c4c6-68vgq" event={"ID":"f2e8a63e-9ce0-4009-b041-46c7f29daa11","Type":"ContainerStarted","Data":"e2941f1546150ee9fd7ab2661c0ff8e7c572c05d243ff33da3213c339807bb39"} Dec 02 18:55:13 crc kubenswrapper[4792]: I1202 18:55:13.594187 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-98tg4" event={"ID":"dde05ba1-9b55-4f92-9782-d03fed8f26b0","Type":"ContainerStarted","Data":"623aa02eab32fb070b35e665b8f0a11ff2c8371cb22b7e71b0a53de2fd292ad2"} Dec 02 18:55:13 crc kubenswrapper[4792]: I1202 18:55:13.601573 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-b7kc7" event={"ID":"ef4ea028-2f42-4560-aad7-94553ba2d3d4","Type":"ContainerStarted","Data":"12b250115023226e35760100af654417d95ac0bc9f6bde7a8eba8273a6654f77"} Dec 02 18:55:13 crc kubenswrapper[4792]: I1202 18:55:13.605654 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-l4tvg" event={"ID":"4998553b-ffbc-4684-9756-22885fec1a98","Type":"ContainerStarted","Data":"aa8506036dec3bfbcf5c6edca6c0498f20a58688abe4d278bec64719c743c037"} Dec 02 18:55:13 crc kubenswrapper[4792]: I1202 18:55:13.613137 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-cfpd2" event={"ID":"64a3a015-bcba-4079-b30b-47579e9a7513","Type":"ContainerStarted","Data":"5bfd1a277f55ff5bedb6848ecf3abd45540dfd233d21e611f176c002f057f942"} Dec 02 18:55:13 crc kubenswrapper[4792]: I1202 18:55:13.617555 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jq2kj" 
event={"ID":"8bd01614-55c6-44bf-b67b-8a6570d9425c","Type":"ContainerStarted","Data":"366ab90d41d476643041042379bed6bf0ebf92509e03502f58fe913423e1b2d4"} Dec 02 18:55:15 crc kubenswrapper[4792]: I1202 18:55:15.644458 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-54d77c4c6-68vgq" event={"ID":"f2e8a63e-9ce0-4009-b041-46c7f29daa11","Type":"ContainerStarted","Data":"68a9689bddfe014ddbfa932481b64525fcb2c8f1fd9461bce614ac362418e71d"} Dec 02 18:55:15 crc kubenswrapper[4792]: I1202 18:55:15.645027 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-54d77c4c6-68vgq" Dec 02 18:55:15 crc kubenswrapper[4792]: I1202 18:55:15.693165 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-54d77c4c6-68vgq" podStartSLOduration=28.693145345 podStartE2EDuration="28.693145345s" podCreationTimestamp="2025-12-02 18:54:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:55:15.682781602 +0000 UTC m=+1146.455673940" watchObservedRunningTime="2025-12-02 18:55:15.693145345 +0000 UTC m=+1146.466037673" Dec 02 18:55:22 crc kubenswrapper[4792]: E1202 18:55:22.197275 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xbg4j" podUID="77a52e44-0fcb-4b97-93de-0d26a6901c37" Dec 02 18:55:22 crc kubenswrapper[4792]: E1202 18:55:22.237624 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-xrbqg" podUID="9c2e4c8c-6f7d-41b0-bc43-19e0b14297f9" Dec 02 18:55:22 crc kubenswrapper[4792]: I1202 18:55:22.726592 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jq2kj" event={"ID":"8bd01614-55c6-44bf-b67b-8a6570d9425c","Type":"ContainerStarted","Data":"9bfbd79edddedaad245263e8ce41070cbe9487bdae797ea0a7eaaf7f0912dd1e"} Dec 02 18:55:22 crc kubenswrapper[4792]: I1202 18:55:22.727690 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-xrbqg" event={"ID":"9c2e4c8c-6f7d-41b0-bc43-19e0b14297f9","Type":"ContainerStarted","Data":"a08ea370d35f15c863573649994e0d2f590fdeddb91cdb62334a05d2c164c78c"} Dec 02 18:55:22 crc kubenswrapper[4792]: I1202 18:55:22.731196 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-92mth" event={"ID":"e3a43b00-c682-4d04-9996-ceb79a245a18","Type":"ContainerStarted","Data":"5a980362e9a1d599b64a4f8135b1e68e8ed66a62de42905c1fdfa0524dae233e"} Dec 02 18:55:22 crc kubenswrapper[4792]: I1202 18:55:22.732878 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-92mth" Dec 02 18:55:22 crc kubenswrapper[4792]: I1202 18:55:22.733411 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-92mth" Dec 02 18:55:22 crc kubenswrapper[4792]: I1202 18:55:22.734499 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-xn9ms" event={"ID":"feb0adee-ff46-4603-80f1-a086af7e863c","Type":"ContainerStarted","Data":"9c74614551147f5f726b9c220e88702a9b44471716823dcf09127e3a2c7ad95f"} Dec 02 18:55:22 crc kubenswrapper[4792]: I1202 18:55:22.737212 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-zm7n7" event={"ID":"2abae0c8-1cd8-4329-a4dc-678124e1195a","Type":"ContainerStarted","Data":"d1c35c23038bf1a04ab31c093ea875e5313e1b22c562b5b092958517f845a848"} Dec 02 18:55:22 crc kubenswrapper[4792]: I1202 18:55:22.737799 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-zm7n7" Dec 02 18:55:22 crc kubenswrapper[4792]: I1202 18:55:22.739194 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-zm7n7" Dec 02 18:55:22 crc kubenswrapper[4792]: I1202 18:55:22.739827 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-l4tvg" event={"ID":"4998553b-ffbc-4684-9756-22885fec1a98","Type":"ContainerStarted","Data":"1bd3208390c6aae695f3cdb43d52edd0742d385a844ac6c8ba045140b71e0b3d"} Dec 02 18:55:22 crc kubenswrapper[4792]: I1202 18:55:22.740674 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-6v9cs" event={"ID":"ad6c4009-148b-4b91-bd36-4d9bd2a16bed","Type":"ContainerStarted","Data":"37f55d6462b536c24ba754e61aa5501380bb7c2ac9c0d03aafb61c5631e8b502"} Dec 02 18:55:22 crc kubenswrapper[4792]: I1202 18:55:22.743944 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-6cc9d48475-tplrw" event={"ID":"474ebfec-9504-4baa-a320-af5bd167bf33","Type":"ContainerStarted","Data":"e343cd565dfb74d8c228f5db106458fefddc2e597af061ac57861020b15dbc16"} Dec 02 18:55:22 crc kubenswrapper[4792]: I1202 18:55:22.745615 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-ddpv4" event={"ID":"a725d6d0-4642-4316-9e67-e002d58f7117","Type":"ContainerStarted","Data":"ff1c0b21f5bb115ee6adf9b0e740776bf448c5dc310de16a382b052f07b069e0"} Dec 02 18:55:22 crc kubenswrapper[4792]: I1202 18:55:22.746653 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-89kmd" event={"ID":"daa6dcd6-39c7-44fc-9754-7de254748ec3","Type":"ContainerStarted","Data":"49d35eb69c7da122dcb1388512ee93c2932309efd887ca939aebfff1446d19f7"} Dec 02 18:55:22 crc kubenswrapper[4792]: I1202 18:55:22.756440 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-89kmd" Dec 02 18:55:22 crc kubenswrapper[4792]: I1202 18:55:22.760641 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xbg4j" event={"ID":"77a52e44-0fcb-4b97-93de-0d26a6901c37","Type":"ContainerStarted","Data":"04be3005d0ee4156612bd7681d111b4a158e2db12773d8c57d49e94eea59cd2c"} Dec 02 18:55:22 crc kubenswrapper[4792]: I1202 
18:55:22.764827 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-89kmd" Dec 02 18:55:22 crc kubenswrapper[4792]: I1202 18:55:22.777508 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-d922f" event={"ID":"746154e5-b7a7-4ce9-b0db-4c88c998ccac","Type":"ContainerStarted","Data":"ca51302cd716346bbe3778c5718a2a690f3bcd9c750cb35e4d70bc21c058d36e"} Dec 02 18:55:22 crc kubenswrapper[4792]: I1202 18:55:22.777564 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-d922f" event={"ID":"746154e5-b7a7-4ce9-b0db-4c88c998ccac","Type":"ContainerStarted","Data":"6db0041c580992cbef69255c328c382e9afa00d299c6a2b7be055239f8de4ce3"} Dec 02 18:55:22 crc kubenswrapper[4792]: I1202 18:55:22.778193 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5854674fcc-d922f" Dec 02 18:55:22 crc kubenswrapper[4792]: I1202 18:55:22.792859 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-kqqtk" event={"ID":"c305906f-16d5-4e43-9666-299106995d65","Type":"ContainerStarted","Data":"81cbe4d5a0918b3a4d47973cf5a8626db495e4fb1d18a19bf81cb7c6272486d7"} Dec 02 18:55:22 crc kubenswrapper[4792]: I1202 18:55:22.794935 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-89kmd" podStartSLOduration=2.471019636 podStartE2EDuration="35.794918574s" podCreationTimestamp="2025-12-02 18:54:47 +0000 UTC" firstStartedPulling="2025-12-02 18:54:48.266847232 +0000 UTC m=+1119.039739560" lastFinishedPulling="2025-12-02 18:55:21.59074617 +0000 UTC m=+1152.363638498" observedRunningTime="2025-12-02 18:55:22.790885692 +0000 UTC m=+1153.563778010" watchObservedRunningTime="2025-12-02 18:55:22.794918574 +0000 UTC m=+1153.567810902" Dec 02 18:55:22 crc kubenswrapper[4792]: I1202 18:55:22.828381 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-zm7n7" podStartSLOduration=3.5491142719999997 podStartE2EDuration="35.828367652s" podCreationTimestamp="2025-12-02 18:54:47 +0000 UTC" firstStartedPulling="2025-12-02 18:54:49.396335031 +0000 UTC m=+1120.169227359" lastFinishedPulling="2025-12-02 18:55:21.675588411 +0000 UTC m=+1152.448480739" observedRunningTime="2025-12-02 18:55:22.824927685 +0000 UTC m=+1153.597820013" watchObservedRunningTime="2025-12-02 18:55:22.828367652 +0000 UTC m=+1153.601259980" Dec 02 18:55:22 crc kubenswrapper[4792]: I1202 18:55:22.833770 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-g8nwt" event={"ID":"15e497ba-5375-4926-80f5-f46940572f8f","Type":"ContainerStarted","Data":"93eb2940c7a33881b680bf4684bbc196f8abe9c7828c5024719ebd93f13b12a3"} Dec 02 18:55:22 crc kubenswrapper[4792]: I1202 18:55:22.834624 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-g8nwt" Dec 02 18:55:22 crc kubenswrapper[4792]: I1202 18:55:22.837707 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-g8nwt" Dec 02 18:55:22 crc 
kubenswrapper[4792]: I1202 18:55:22.898049 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-92mth" podStartSLOduration=3.502134032 podStartE2EDuration="35.898033979s" podCreationTimestamp="2025-12-02 18:54:47 +0000 UTC" firstStartedPulling="2025-12-02 18:54:49.396582188 +0000 UTC m=+1120.169474516" lastFinishedPulling="2025-12-02 18:55:21.792482125 +0000 UTC m=+1152.565374463" observedRunningTime="2025-12-02 18:55:22.894788266 +0000 UTC m=+1153.667680594" watchObservedRunningTime="2025-12-02 18:55:22.898033979 +0000 UTC m=+1153.670926307" Dec 02 18:55:22 crc kubenswrapper[4792]: I1202 18:55:22.971495 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-g8nwt" podStartSLOduration=3.716696971 podStartE2EDuration="35.971481351s" podCreationTimestamp="2025-12-02 18:54:47 +0000 UTC" firstStartedPulling="2025-12-02 18:54:49.408092969 +0000 UTC m=+1120.180985287" lastFinishedPulling="2025-12-02 18:55:21.662877339 +0000 UTC m=+1152.435769667" observedRunningTime="2025-12-02 18:55:22.964902714 +0000 UTC m=+1153.737795042" watchObservedRunningTime="2025-12-02 18:55:22.971481351 +0000 UTC m=+1153.744373679" Dec 02 18:55:22 crc kubenswrapper[4792]: I1202 18:55:22.971761 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-xn9ms" podStartSLOduration=2.89133701 podStartE2EDuration="34.971756628s" podCreationTimestamp="2025-12-02 18:54:48 +0000 UTC" firstStartedPulling="2025-12-02 18:54:49.453355997 +0000 UTC m=+1120.226248325" lastFinishedPulling="2025-12-02 18:55:21.533775615 +0000 UTC m=+1152.306667943" observedRunningTime="2025-12-02 18:55:22.938777742 +0000 UTC m=+1153.711670070" watchObservedRunningTime="2025-12-02 18:55:22.971756628 +0000 UTC m=+1153.744648956" Dec 02 18:55:23 crc kubenswrapper[4792]: I1202 18:55:23.094148 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5854674fcc-d922f" podStartSLOduration=5.325873025 podStartE2EDuration="36.094129741s" podCreationTimestamp="2025-12-02 18:54:47 +0000 UTC" firstStartedPulling="2025-12-02 18:54:49.44005164 +0000 UTC m=+1120.212943968" lastFinishedPulling="2025-12-02 18:55:20.208308356 +0000 UTC m=+1150.981200684" observedRunningTime="2025-12-02 18:55:23.086935169 +0000 UTC m=+1153.859827487" watchObservedRunningTime="2025-12-02 18:55:23.094129741 +0000 UTC m=+1153.867022069" Dec 02 18:55:23 crc kubenswrapper[4792]: I1202 18:55:23.851893 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-fmggq" event={"ID":"9f1a320f-5255-4fc4-b973-39ce2aee3bae","Type":"ContainerStarted","Data":"72f9e245005fb4178da1a00aa8f0c66a1151feede3abff3270c8b367e2b8a0b6"} Dec 02 18:55:23 crc kubenswrapper[4792]: I1202 18:55:23.853126 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-fmggq" Dec 02 18:55:23 crc kubenswrapper[4792]: E1202 18:55:23.854712 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-ksx7c" podUID="588c52cc-05c0-438d-bb0f-80bc1236d8cc" 
Dec 02 18:55:23 crc kubenswrapper[4792]: I1202 18:55:23.855302 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-fmggq"
Dec 02 18:55:23 crc kubenswrapper[4792]: I1202 18:55:23.857059 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-l4tvg" event={"ID":"4998553b-ffbc-4684-9756-22885fec1a98","Type":"ContainerStarted","Data":"8f5998d4a9a8bdbe31141235f186b99bc1dd7a33ebcc955f138dad9d3e46394e"}
Dec 02 18:55:23 crc kubenswrapper[4792]: I1202 18:55:23.863554 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-6v9cs" event={"ID":"ad6c4009-148b-4b91-bd36-4d9bd2a16bed","Type":"ContainerStarted","Data":"5e002d808b0013735e9ed18fb164010680931bc66a5d5e80412b633d49a80d45"}
Dec 02 18:55:23 crc kubenswrapper[4792]: I1202 18:55:23.863659 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-6v9cs"
Dec 02 18:55:23 crc kubenswrapper[4792]: I1202 18:55:23.865248 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jq2kj" event={"ID":"8bd01614-55c6-44bf-b67b-8a6570d9425c","Type":"ContainerStarted","Data":"d1c84a5f67c557d58b8b4bf28a4d85935b5f4c66bcaa3affa2486a9e0752d3ae"}
Dec 02 18:55:23 crc kubenswrapper[4792]: I1202 18:55:23.865709 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jq2kj"
Dec 02 18:55:23 crc kubenswrapper[4792]: I1202 18:55:23.874032 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-98tg4" event={"ID":"dde05ba1-9b55-4f92-9782-d03fed8f26b0","Type":"ContainerStarted","Data":"018c36a5476d203f20d520ca4a7e4962985a8a182a3291e63fdc56624e2bc8eb"}
Dec 02 18:55:23 crc kubenswrapper[4792]: I1202 18:55:23.874180 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-998648c74-98tg4"
Dec 02 18:55:23 crc kubenswrapper[4792]: I1202 18:55:23.876809 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-ddpv4" event={"ID":"a725d6d0-4642-4316-9e67-e002d58f7117","Type":"ContainerStarted","Data":"72a4df15ad841bca52e38021f47d9ecf998435612167ece6baf2dfb116f66ecc"}
Dec 02 18:55:23 crc kubenswrapper[4792]: I1202 18:55:23.876892 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-ddpv4"
Dec 02 18:55:23 crc kubenswrapper[4792]: I1202 18:55:23.884293 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-fmggq" podStartSLOduration=3.818801401 podStartE2EDuration="36.884279637s" podCreationTimestamp="2025-12-02 18:54:47 +0000 UTC" firstStartedPulling="2025-12-02 18:54:49.031330606 +0000 UTC m=+1119.804222934" lastFinishedPulling="2025-12-02 18:55:22.096808842 +0000 UTC m=+1152.869701170" observedRunningTime="2025-12-02 18:55:23.875865394 +0000 UTC m=+1154.648757722" watchObservedRunningTime="2025-12-02 18:55:23.884279637 +0000 UTC m=+1154.657171965"
Dec 02 18:55:23 crc kubenswrapper[4792]: I1202 18:55:23.885668 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-998648c74-98tg4"
Dec 02 18:55:23 crc kubenswrapper[4792]: I1202 18:55:23.889925 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-xrbqg" event={"ID":"9c2e4c8c-6f7d-41b0-bc43-19e0b14297f9","Type":"ContainerStarted","Data":"7e2a6abd8fafb83759b527d0f99f5c0885e0e8fe06d89eb88386aa346a1480a3"}
Dec 02 18:55:23 crc kubenswrapper[4792]: I1202 18:55:23.890638 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-xrbqg"
Dec 02 18:55:23 crc kubenswrapper[4792]: I1202 18:55:23.916610 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-zpmp6" event={"ID":"851b5fce-f6b9-4fef-a80c-e66336c5fa49","Type":"ContainerStarted","Data":"113063e8c6493dba9fc708b75b77d7f7cc4a8c42aa25defbe8bfab524040aff8"}
Dec 02 18:55:23 crc kubenswrapper[4792]: I1202 18:55:23.917013 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-zpmp6"
Dec 02 18:55:23 crc kubenswrapper[4792]: I1202 18:55:23.918161 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jq2kj" podStartSLOduration=28.038883316 podStartE2EDuration="36.918146836s" podCreationTimestamp="2025-12-02 18:54:47 +0000 UTC" firstStartedPulling="2025-12-02 18:55:12.543813098 +0000 UTC m=+1143.316705426" lastFinishedPulling="2025-12-02 18:55:21.423076588 +0000 UTC m=+1152.195968946" observedRunningTime="2025-12-02 18:55:23.914187286 +0000 UTC m=+1154.687079624" watchObservedRunningTime="2025-12-02 18:55:23.918146836 +0000 UTC m=+1154.691039164"
Dec 02 18:55:23 crc kubenswrapper[4792]: I1202 18:55:23.919697 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-zpmp6"
Dec 02 18:55:23 crc kubenswrapper[4792]: I1202 18:55:23.925132 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-7qgqw" event={"ID":"fce25a63-01bd-458a-9567-f08f710abec9","Type":"ContainerStarted","Data":"aa41f047f87cb8732f0b9790073b19c5f23ad3d75afb2e02f97a53a759dac4df"}
Dec 02 18:55:23 crc kubenswrapper[4792]: I1202 18:55:23.925583 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-7qgqw"
Dec 02 18:55:23 crc kubenswrapper[4792]: I1202 18:55:23.934232 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-7qgqw"
Dec 02 18:55:23 crc kubenswrapper[4792]: I1202 18:55:23.945912 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-cfpd2" event={"ID":"64a3a015-bcba-4079-b30b-47579e9a7513","Type":"ContainerStarted","Data":"a9c437407a71482cd1fc29372bf2fb50f8b9a0ffcd22813fc7fd38a1f330de69"}
Dec 02 18:55:23 crc kubenswrapper[4792]: I1202 18:55:23.946867 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-cfpd2"
Dec 02 18:55:23 crc kubenswrapper[4792]: I1202 18:55:23.948225 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-cfpd2"
Dec 02 18:55:23 crc kubenswrapper[4792]: I1202 18:55:23.962207 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-6cc9d48475-tplrw" event={"ID":"474ebfec-9504-4baa-a320-af5bd167bf33","Type":"ContainerStarted","Data":"045dbbff66a905886d08d879e8d215676e5094e91ea04fd06e5b2c526d8db205"}
Dec 02 18:55:23 crc kubenswrapper[4792]: I1202 18:55:23.962877 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-6cc9d48475-tplrw"
Dec 02 18:55:23 crc kubenswrapper[4792]: I1202 18:55:23.970147 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-b7kc7" event={"ID":"ef4ea028-2f42-4560-aad7-94553ba2d3d4","Type":"ContainerStarted","Data":"45b769700dfb263ad2543ef4c895be0db358ad2a9efdb66bd654afd7077212a7"}
Dec 02 18:55:23 crc kubenswrapper[4792]: I1202 18:55:23.971054 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-b7kc7"
Dec 02 18:55:23 crc kubenswrapper[4792]: I1202 18:55:23.975682 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-b7kc7"
Dec 02 18:55:23 crc kubenswrapper[4792]: I1202 18:55:23.983280 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-kqqtk" event={"ID":"c305906f-16d5-4e43-9666-299106995d65","Type":"ContainerStarted","Data":"6c6ad2ed0bab4e3882d19233c676bdd6780abb8304a609d90e1be53f31b94937"}
Dec 02 18:55:23 crc kubenswrapper[4792]: I1202 18:55:23.983684 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-78f8948974-kqqtk"
Dec 02 18:55:23 crc kubenswrapper[4792]: I1202 18:55:23.990902 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-57548d458d-l4tvg" podStartSLOduration=29.010286506 podStartE2EDuration="36.99088615s" podCreationTimestamp="2025-12-02 18:54:47 +0000 UTC" firstStartedPulling="2025-12-02 18:55:13.51565932 +0000 UTC m=+1144.288551638" lastFinishedPulling="2025-12-02 18:55:21.496258954 +0000 UTC m=+1152.269151282" observedRunningTime="2025-12-02 18:55:23.952895597 +0000 UTC m=+1154.725787925" watchObservedRunningTime="2025-12-02 18:55:23.99088615 +0000 UTC m=+1154.763778468"
Dec 02 18:55:23 crc kubenswrapper[4792]: I1202 18:55:23.991737 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-6v9cs" podStartSLOduration=5.037921774 podStartE2EDuration="36.991731352s" podCreationTimestamp="2025-12-02 18:54:47 +0000 UTC" firstStartedPulling="2025-12-02 18:54:49.439756912 +0000 UTC m=+1120.212649240" lastFinishedPulling="2025-12-02 18:55:21.39356649 +0000 UTC m=+1152.166458818" observedRunningTime="2025-12-02 18:55:23.990695956 +0000 UTC m=+1154.763588284" watchObservedRunningTime="2025-12-02 18:55:23.991731352 +0000 UTC m=+1154.764623690"
Dec 02 18:55:24 crc kubenswrapper[4792]: I1202 18:55:24.004838 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-qrj8t"
Dec 02 18:55:24 crc kubenswrapper[4792]: I1202 18:55:24.010272 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-qrj8t"
Dec 02 18:55:24 crc kubenswrapper[4792]: I1202 18:55:24.050373 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-998648c74-98tg4" podStartSLOduration=4.58739324 podStartE2EDuration="37.050316827s" podCreationTimestamp="2025-12-02 18:54:47 +0000 UTC" firstStartedPulling="2025-12-02 18:54:49.377198566 +0000 UTC m=+1120.150090894" lastFinishedPulling="2025-12-02 18:55:21.840122143 +0000 UTC m=+1152.613014481" observedRunningTime="2025-12-02 18:55:24.035615705 +0000 UTC m=+1154.808508033" watchObservedRunningTime="2025-12-02 18:55:24.050316827 +0000 UTC m=+1154.823209155"
Dec 02 18:55:24 crc kubenswrapper[4792]: I1202 18:55:24.069807 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-54d77c4c6-68vgq"
Dec 02 18:55:24 crc kubenswrapper[4792]: I1202 18:55:24.086050 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-qrj8t" podStartSLOduration=4.264251437 podStartE2EDuration="37.086025433s" podCreationTimestamp="2025-12-02 18:54:47 +0000 UTC" firstStartedPulling="2025-12-02 18:54:49.389847647 +0000 UTC m=+1120.162739975" lastFinishedPulling="2025-12-02 18:55:22.211621643 +0000 UTC m=+1152.984513971" observedRunningTime="2025-12-02 18:55:24.070828238 +0000 UTC m=+1154.843720566" watchObservedRunningTime="2025-12-02 18:55:24.086025433 +0000 UTC m=+1154.858917761"
Dec 02 18:55:24 crc kubenswrapper[4792]: I1202 18:55:24.223073 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-ddpv4" podStartSLOduration=5.891906618 podStartE2EDuration="37.223053547s" podCreationTimestamp="2025-12-02 18:54:47 +0000 UTC" firstStartedPulling="2025-12-02 18:54:49.452492465 +0000 UTC m=+1120.225384783" lastFinishedPulling="2025-12-02 18:55:20.783639384 +0000 UTC m=+1151.556531712" observedRunningTime="2025-12-02 18:55:24.181993045 +0000 UTC m=+1154.954885373" watchObservedRunningTime="2025-12-02 18:55:24.223053547 +0000 UTC m=+1154.995945875"
Dec 02 18:55:24 crc kubenswrapper[4792]: I1202 18:55:24.225023 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-6cc9d48475-tplrw" podStartSLOduration=5.126798057 podStartE2EDuration="37.225016826s" podCreationTimestamp="2025-12-02 18:54:47 +0000 UTC" firstStartedPulling="2025-12-02 18:54:49.43887184 +0000 UTC m=+1120.211764168" lastFinishedPulling="2025-12-02 18:55:21.537090569 +0000 UTC m=+1152.309982937" observedRunningTime="2025-12-02 18:55:24.137920228 +0000 UTC m=+1154.910812576" watchObservedRunningTime="2025-12-02 18:55:24.225016826 +0000 UTC m=+1154.997909154"
Dec 02 18:55:24 crc kubenswrapper[4792]: I1202 18:55:24.283649 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-cfpd2" podStartSLOduration=4.862268779 podStartE2EDuration="37.283632093s" podCreationTimestamp="2025-12-02 18:54:47 +0000 UTC" firstStartedPulling="2025-12-02 18:54:49.40379102 +0000 UTC m=+1120.176683348" lastFinishedPulling="2025-12-02 18:55:21.825154314 +0000 UTC m=+1152.598046662" observedRunningTime="2025-12-02 18:55:24.280651227 +0000 UTC m=+1155.053543555" watchObservedRunningTime="2025-12-02 18:55:24.283632093 +0000 UTC m=+1155.056524421"
Dec 02 18:55:24 crc kubenswrapper[4792]: I1202 18:55:24.316595 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-b7kc7" podStartSLOduration=4.937350083 podStartE2EDuration="37.316557548s" podCreationTimestamp="2025-12-02 18:54:47 +0000 UTC" firstStartedPulling="2025-12-02 18:54:49.408092119 +0000 UTC m=+1120.180984447" lastFinishedPulling="2025-12-02 18:55:21.787299564 +0000 UTC m=+1152.560191912" observedRunningTime="2025-12-02 18:55:24.310583106 +0000 UTC m=+1155.083475434" watchObservedRunningTime="2025-12-02 18:55:24.316557548 +0000 UTC m=+1155.089449886"
Dec 02 18:55:24 crc kubenswrapper[4792]: I1202 18:55:24.343343 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-7qgqw" podStartSLOduration=4.271448397 podStartE2EDuration="37.343325756s" podCreationTimestamp="2025-12-02 18:54:47 +0000 UTC" firstStartedPulling="2025-12-02 18:54:48.715667661 +0000 UTC m=+1119.488559989" lastFinishedPulling="2025-12-02 18:55:21.78754501 +0000 UTC m=+1152.560437348" observedRunningTime="2025-12-02 18:55:24.334887442 +0000 UTC m=+1155.107779770" watchObservedRunningTime="2025-12-02 18:55:24.343325756 +0000 UTC m=+1155.116218074"
Dec 02 18:55:24 crc kubenswrapper[4792]: I1202 18:55:24.367282 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-zpmp6" podStartSLOduration=3.888344023 podStartE2EDuration="37.367266333s" podCreationTimestamp="2025-12-02 18:54:47 +0000 UTC" firstStartedPulling="2025-12-02 18:54:48.707247758 +0000 UTC m=+1119.480140086" lastFinishedPulling="2025-12-02 18:55:22.186170068 +0000 UTC m=+1152.959062396" observedRunningTime="2025-12-02 18:55:24.363064007 +0000 UTC m=+1155.135956335" watchObservedRunningTime="2025-12-02 18:55:24.367266333 +0000 UTC m=+1155.140158661"
Dec 02 18:55:24 crc kubenswrapper[4792]: I1202 18:55:24.401944 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-78f8948974-kqqtk" podStartSLOduration=5.424153217 podStartE2EDuration="37.401925022s" podCreationTimestamp="2025-12-02 18:54:47 +0000 UTC" firstStartedPulling="2025-12-02 18:54:49.415569079 +0000 UTC m=+1120.188461407" lastFinishedPulling="2025-12-02 18:55:21.393340854 +0000 UTC m=+1152.166233212" observedRunningTime="2025-12-02 18:55:24.40025802 +0000 UTC m=+1155.173150358" watchObservedRunningTime="2025-12-02 18:55:24.401925022 +0000 UTC m=+1155.174817340"
Dec 02 18:55:24 crc kubenswrapper[4792]: I1202 18:55:24.446830 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-xrbqg" podStartSLOduration=3.152585786 podStartE2EDuration="37.44681127s" podCreationTimestamp="2025-12-02 18:54:47 +0000 UTC" firstStartedPulling="2025-12-02 18:54:48.996892852 +0000 UTC m=+1119.769785170" lastFinishedPulling="2025-12-02 18:55:23.291118326 +0000 UTC m=+1154.064010654" observedRunningTime="2025-12-02 18:55:24.443504317 +0000 UTC m=+1155.216396645" watchObservedRunningTime="2025-12-02 18:55:24.44681127 +0000 UTC m=+1155.219703598"
Dec 02 18:55:25 crc kubenswrapper[4792]: I1202 18:55:25.011406 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-qrj8t" event={"ID":"8becc537-85f1-4b33-8b6a-1ef3bc550cdd","Type":"ContainerStarted","Data":"ffc113fec48c4f7eb0f1bccd67069f37b5f9845a5dbfc4fc25d232fb62d5ee11"}
Dec 02 18:55:25 crc kubenswrapper[4792]: I1202 18:55:25.013462 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xbg4j" event={"ID":"77a52e44-0fcb-4b97-93de-0d26a6901c37","Type":"ContainerStarted","Data":"f96390273e4b1812b0a019ab485b211d8b57fe550181cedf2ab25702824d88af"}
Dec 02 18:55:25 crc kubenswrapper[4792]: I1202 18:55:25.014051 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xbg4j"
Dec 02 18:55:25 crc kubenswrapper[4792]: I1202 18:55:25.016018 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-ksx7c" event={"ID":"588c52cc-05c0-438d-bb0f-80bc1236d8cc","Type":"ContainerStarted","Data":"54739fcbddac28b08a2c4c56aa9bd1d0208d2160a7ea808c87f57ef6542f983a"}
Dec 02 18:55:25 crc kubenswrapper[4792]: I1202 18:55:25.016040 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-ksx7c" event={"ID":"588c52cc-05c0-438d-bb0f-80bc1236d8cc","Type":"ContainerStarted","Data":"e4f9473dd1bfbf98b3b22b3e0cc353042ebe8db60ef8fce1eca9726a2a7e2854"}
Dec 02 18:55:25 crc kubenswrapper[4792]: I1202 18:55:25.016354 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-ksx7c"
Dec 02 18:55:25 crc kubenswrapper[4792]: I1202 18:55:25.020098 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-57548d458d-l4tvg"
Dec 02 18:55:25 crc kubenswrapper[4792]: I1202 18:55:25.038010 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xbg4j" podStartSLOduration=3.6056576849999997 podStartE2EDuration="38.037990191s" podCreationTimestamp="2025-12-02 18:54:47 +0000 UTC" firstStartedPulling="2025-12-02 18:54:48.996549774 +0000 UTC m=+1119.769442102" lastFinishedPulling="2025-12-02 18:55:23.42888228 +0000 UTC m=+1154.201774608" observedRunningTime="2025-12-02 18:55:25.032678436 +0000 UTC m=+1155.805570774" watchObservedRunningTime="2025-12-02 18:55:25.037990191 +0000 UTC m=+1155.810882529"
Dec 02 18:55:25 crc kubenswrapper[4792]: I1202 18:55:25.053157 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-ksx7c" podStartSLOduration=2.958044504 podStartE2EDuration="38.053142595s" podCreationTimestamp="2025-12-02 18:54:47 +0000 UTC" firstStartedPulling="2025-12-02 18:54:49.336665968 +0000 UTC m=+1120.109558296" lastFinishedPulling="2025-12-02 18:55:24.431764059 +0000 UTC m=+1155.204656387" observedRunningTime="2025-12-02 18:55:25.050033786 +0000 UTC m=+1155.822926124" watchObservedRunningTime="2025-12-02 18:55:25.053142595 +0000 UTC m=+1155.826034933"
Dec 02 18:55:26 crc kubenswrapper[4792]: I1202 18:55:26.028880 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jq2kj"
Dec 02 18:55:28 crc kubenswrapper[4792]: I1202 18:55:28.053104 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-ddpv4"
Dec 02 18:55:28 crc kubenswrapper[4792]: I1202 18:55:28.203117 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-78f8948974-kqqtk"
Dec 02 18:55:28 crc kubenswrapper[4792]: I1202 18:55:28.270667 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-6cc9d48475-tplrw"
Dec 02 18:55:28 crc kubenswrapper[4792]: I1202 18:55:28.327908 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5854674fcc-d922f"
Dec 02 18:55:28 crc kubenswrapper[4792]: I1202 18:55:28.403275 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-6v9cs"
Dec 02 18:55:33 crc kubenswrapper[4792]: I1202 18:55:33.690895 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-57548d458d-l4tvg"
Dec 02 18:55:37 crc kubenswrapper[4792]: I1202 18:55:37.699921 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xbg4j"
Dec 02 18:55:37 crc kubenswrapper[4792]: I1202 18:55:37.819365 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-xrbqg"
Dec 02 18:55:38 crc kubenswrapper[4792]: I1202 18:55:38.049624 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-ksx7c"
Dec 02 18:55:38 crc kubenswrapper[4792]: I1202 18:55:38.081567 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 02 18:55:38 crc kubenswrapper[4792]: I1202 18:55:38.081640 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 02 18:55:38 crc kubenswrapper[4792]: I1202 18:55:38.081700 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4"
Dec 02 18:55:38 crc kubenswrapper[4792]: I1202 18:55:38.082644 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"38cdfcc4ac221e244e725dd6d1a5012b531f01f9bc318459a440b509a9410447"} pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 02 18:55:38 crc kubenswrapper[4792]: I1202 18:55:38.082746 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" containerID="cri-o://38cdfcc4ac221e244e725dd6d1a5012b531f01f9bc318459a440b509a9410447" gracePeriod=600
Dec 02 18:55:40 crc kubenswrapper[4792]: I1202 18:55:40.153605 4792 generic.go:334] "Generic (PLEG): container finished" podID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerID="38cdfcc4ac221e244e725dd6d1a5012b531f01f9bc318459a440b509a9410447" exitCode=0
Dec 02 18:55:40 crc kubenswrapper[4792]: I1202 18:55:40.153667 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" event={"ID":"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f","Type":"ContainerDied","Data":"38cdfcc4ac221e244e725dd6d1a5012b531f01f9bc318459a440b509a9410447"}
Dec 02 18:55:40 crc kubenswrapper[4792]: I1202 18:55:40.153890 4792 scope.go:117] "RemoveContainer" containerID="51777911bdc2fd4cba93567e9367b9a4b299d12dfa4458a4ce8aa3d35773b2ea"
Dec 02 18:55:41 crc kubenswrapper[4792]: I1202 18:55:41.194185 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" event={"ID":"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f","Type":"ContainerStarted","Data":"dc04fc6a49e6cf5090b01aa7a72bef9189df42704e3c79ebf0699d5a961190bd"}
Dec 02 18:55:54 crc kubenswrapper[4792]: I1202 18:55:54.802832 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-cnzmz"]
Dec 02 18:55:55 crc kubenswrapper[4792]: I1202 18:55:54.805222 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-cnzmz"
Dec 02 18:55:55 crc kubenswrapper[4792]: I1202 18:55:54.808266 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns"
Dec 02 18:55:55 crc kubenswrapper[4792]: I1202 18:55:54.808474 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt"
Dec 02 18:55:55 crc kubenswrapper[4792]: I1202 18:55:54.808638 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt"
Dec 02 18:55:55 crc kubenswrapper[4792]: I1202 18:55:54.808772 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-xfrtf"
Dec 02 18:55:55 crc kubenswrapper[4792]: I1202 18:55:54.814060 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-cnzmz"]
Dec 02 18:55:55 crc kubenswrapper[4792]: I1202 18:55:54.835614 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e0c811b-ea81-495e-b33d-6bc8419830d1-config\") pod \"dnsmasq-dns-675f4bcbfc-cnzmz\" (UID: \"9e0c811b-ea81-495e-b33d-6bc8419830d1\") " pod="openstack/dnsmasq-dns-675f4bcbfc-cnzmz"
Dec 02 18:55:55 crc kubenswrapper[4792]: I1202 18:55:54.835733 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b47vl\" (UniqueName: \"kubernetes.io/projected/9e0c811b-ea81-495e-b33d-6bc8419830d1-kube-api-access-b47vl\") pod \"dnsmasq-dns-675f4bcbfc-cnzmz\" (UID: \"9e0c811b-ea81-495e-b33d-6bc8419830d1\") " pod="openstack/dnsmasq-dns-675f4bcbfc-cnzmz"
Dec 02 18:55:55 crc kubenswrapper[4792]: I1202 18:55:54.869342 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-hd9fc"]
Dec 02 18:55:55 crc kubenswrapper[4792]: I1202 18:55:54.870838 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-hd9fc"
Dec 02 18:55:55 crc kubenswrapper[4792]: I1202 18:55:54.874191 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc"
Dec 02 18:55:55 crc kubenswrapper[4792]: I1202 18:55:54.881739 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-hd9fc"]
Dec 02 18:55:55 crc kubenswrapper[4792]: I1202 18:55:54.937398 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/17e12315-5bc5-4986-a5cd-7e788575f03b-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-hd9fc\" (UID: \"17e12315-5bc5-4986-a5cd-7e788575f03b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-hd9fc"
Dec 02 18:55:55 crc kubenswrapper[4792]: I1202 18:55:54.937446 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bgfkw\" (UniqueName: \"kubernetes.io/projected/17e12315-5bc5-4986-a5cd-7e788575f03b-kube-api-access-bgfkw\") pod \"dnsmasq-dns-78dd6ddcc-hd9fc\" (UID: \"17e12315-5bc5-4986-a5cd-7e788575f03b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-hd9fc"
Dec 02 18:55:55 crc kubenswrapper[4792]: I1202 18:55:54.937634 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b47vl\" (UniqueName: \"kubernetes.io/projected/9e0c811b-ea81-495e-b33d-6bc8419830d1-kube-api-access-b47vl\") pod \"dnsmasq-dns-675f4bcbfc-cnzmz\" (UID: \"9e0c811b-ea81-495e-b33d-6bc8419830d1\") " pod="openstack/dnsmasq-dns-675f4bcbfc-cnzmz"
Dec 02 18:55:55 crc kubenswrapper[4792]: I1202 18:55:54.937689 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e0c811b-ea81-495e-b33d-6bc8419830d1-config\") pod \"dnsmasq-dns-675f4bcbfc-cnzmz\" (UID: \"9e0c811b-ea81-495e-b33d-6bc8419830d1\") " pod="openstack/dnsmasq-dns-675f4bcbfc-cnzmz"
Dec 02 18:55:55 crc kubenswrapper[4792]: I1202 18:55:54.937716 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17e12315-5bc5-4986-a5cd-7e788575f03b-config\") pod \"dnsmasq-dns-78dd6ddcc-hd9fc\" (UID: \"17e12315-5bc5-4986-a5cd-7e788575f03b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-hd9fc"
Dec 02 18:55:55 crc kubenswrapper[4792]: I1202 18:55:54.938618 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e0c811b-ea81-495e-b33d-6bc8419830d1-config\") pod \"dnsmasq-dns-675f4bcbfc-cnzmz\" (UID: \"9e0c811b-ea81-495e-b33d-6bc8419830d1\") " pod="openstack/dnsmasq-dns-675f4bcbfc-cnzmz"
Dec 02 18:55:55 crc kubenswrapper[4792]: I1202 18:55:54.962300 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b47vl\" (UniqueName: \"kubernetes.io/projected/9e0c811b-ea81-495e-b33d-6bc8419830d1-kube-api-access-b47vl\") pod \"dnsmasq-dns-675f4bcbfc-cnzmz\" (UID: \"9e0c811b-ea81-495e-b33d-6bc8419830d1\") " pod="openstack/dnsmasq-dns-675f4bcbfc-cnzmz"
Dec 02 18:55:55 crc kubenswrapper[4792]: I1202 18:55:55.038774 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/17e12315-5bc5-4986-a5cd-7e788575f03b-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-hd9fc\" (UID: \"17e12315-5bc5-4986-a5cd-7e788575f03b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-hd9fc"
Dec 02 18:55:55 crc kubenswrapper[4792]: I1202 18:55:55.039195 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bgfkw\" (UniqueName: \"kubernetes.io/projected/17e12315-5bc5-4986-a5cd-7e788575f03b-kube-api-access-bgfkw\") pod \"dnsmasq-dns-78dd6ddcc-hd9fc\" (UID: \"17e12315-5bc5-4986-a5cd-7e788575f03b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-hd9fc"
Dec 02 18:55:55 crc kubenswrapper[4792]: I1202 18:55:55.039255 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17e12315-5bc5-4986-a5cd-7e788575f03b-config\") pod \"dnsmasq-dns-78dd6ddcc-hd9fc\" (UID: \"17e12315-5bc5-4986-a5cd-7e788575f03b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-hd9fc"
Dec 02 18:55:55 crc kubenswrapper[4792]: I1202 18:55:55.039806 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/17e12315-5bc5-4986-a5cd-7e788575f03b-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-hd9fc\" (UID: \"17e12315-5bc5-4986-a5cd-7e788575f03b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-hd9fc"
Dec 02 18:55:55 crc kubenswrapper[4792]: I1202 18:55:55.040160 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17e12315-5bc5-4986-a5cd-7e788575f03b-config\") pod \"dnsmasq-dns-78dd6ddcc-hd9fc\" (UID: \"17e12315-5bc5-4986-a5cd-7e788575f03b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-hd9fc"
Dec 02 18:55:55 crc kubenswrapper[4792]: I1202 18:55:55.067321 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bgfkw\" (UniqueName: \"kubernetes.io/projected/17e12315-5bc5-4986-a5cd-7e788575f03b-kube-api-access-bgfkw\") pod \"dnsmasq-dns-78dd6ddcc-hd9fc\" (UID: \"17e12315-5bc5-4986-a5cd-7e788575f03b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-hd9fc"
Dec 02 18:55:55 crc kubenswrapper[4792]: I1202 18:55:55.126125 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-cnzmz"
Dec 02 18:55:55 crc kubenswrapper[4792]: I1202 18:55:55.184761 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-hd9fc"
Dec 02 18:55:56 crc kubenswrapper[4792]: I1202 18:55:56.260953 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-cnzmz"]
Dec 02 18:55:56 crc kubenswrapper[4792]: W1202 18:55:56.274877 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9e0c811b_ea81_495e_b33d_6bc8419830d1.slice/crio-fb9b35ac44a720a7c498b082694e588d04f00b7cfdba6e03d1ca6e7209f9ef61 WatchSource:0}: Error finding container fb9b35ac44a720a7c498b082694e588d04f00b7cfdba6e03d1ca6e7209f9ef61: Status 404 returned error can't find the container with id fb9b35ac44a720a7c498b082694e588d04f00b7cfdba6e03d1ca6e7209f9ef61
Dec 02 18:55:56 crc kubenswrapper[4792]: I1202 18:55:56.282662 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-hd9fc"]
Dec 02 18:55:56 crc kubenswrapper[4792]: W1202 18:55:56.291773 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod17e12315_5bc5_4986_a5cd_7e788575f03b.slice/crio-9a9b62a72444b55c6b91a1d245f15a54f6f5ca37ca21ccca6e0b959d66ce834c WatchSource:0}: Error finding container 9a9b62a72444b55c6b91a1d245f15a54f6f5ca37ca21ccca6e0b959d66ce834c: Status 404 returned error can't find the container with id 9a9b62a72444b55c6b91a1d245f15a54f6f5ca37ca21ccca6e0b959d66ce834c
Dec 02 18:55:56 crc kubenswrapper[4792]: I1202 18:55:56.357549 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-hd9fc" event={"ID":"17e12315-5bc5-4986-a5cd-7e788575f03b","Type":"ContainerStarted","Data":"9a9b62a72444b55c6b91a1d245f15a54f6f5ca37ca21ccca6e0b959d66ce834c"}
Dec 02 18:55:56 crc kubenswrapper[4792]: I1202 18:55:56.359234 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-cnzmz" event={"ID":"9e0c811b-ea81-495e-b33d-6bc8419830d1","Type":"ContainerStarted","Data":"fb9b35ac44a720a7c498b082694e588d04f00b7cfdba6e03d1ca6e7209f9ef61"}
Dec 02 18:55:57 crc kubenswrapper[4792]: I1202 18:55:57.596298 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-cnzmz"]
Dec 02 18:55:57 crc kubenswrapper[4792]: I1202 18:55:57.600408 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-fjdkj"]
Dec 02 18:55:57 crc kubenswrapper[4792]: I1202 18:55:57.602749 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-fjdkj"
Dec 02 18:55:57 crc kubenswrapper[4792]: I1202 18:55:57.611026 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-fjdkj"]
Dec 02 18:55:57 crc kubenswrapper[4792]: I1202 18:55:57.785495 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b100fb8-4ab3-4514-840a-98e861a2cc11-config\") pod \"dnsmasq-dns-666b6646f7-fjdkj\" (UID: \"4b100fb8-4ab3-4514-840a-98e861a2cc11\") " pod="openstack/dnsmasq-dns-666b6646f7-fjdkj"
Dec 02 18:55:57 crc kubenswrapper[4792]: I1202 18:55:57.785583 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4b100fb8-4ab3-4514-840a-98e861a2cc11-dns-svc\") pod \"dnsmasq-dns-666b6646f7-fjdkj\" (UID: \"4b100fb8-4ab3-4514-840a-98e861a2cc11\") " pod="openstack/dnsmasq-dns-666b6646f7-fjdkj"
Dec 02 18:55:57 crc kubenswrapper[4792]: I1202 18:55:57.785609 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k8j8f\" (UniqueName: \"kubernetes.io/projected/4b100fb8-4ab3-4514-840a-98e861a2cc11-kube-api-access-k8j8f\") pod \"dnsmasq-dns-666b6646f7-fjdkj\" (UID: \"4b100fb8-4ab3-4514-840a-98e861a2cc11\") " pod="openstack/dnsmasq-dns-666b6646f7-fjdkj"
Dec 02 18:55:57 crc kubenswrapper[4792]: I1202 18:55:57.839812 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-hd9fc"]
Dec 02 18:55:57 crc kubenswrapper[4792]: I1202 18:55:57.873157 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-9hhkl"]
Dec 02 18:55:57 crc kubenswrapper[4792]: I1202 18:55:57.874784 4792 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-9hhkl" Dec 02 18:55:57 crc kubenswrapper[4792]: I1202 18:55:57.890026 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-9hhkl"] Dec 02 18:55:57 crc kubenswrapper[4792]: I1202 18:55:57.890773 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b100fb8-4ab3-4514-840a-98e861a2cc11-config\") pod \"dnsmasq-dns-666b6646f7-fjdkj\" (UID: \"4b100fb8-4ab3-4514-840a-98e861a2cc11\") " pod="openstack/dnsmasq-dns-666b6646f7-fjdkj" Dec 02 18:55:57 crc kubenswrapper[4792]: I1202 18:55:57.890844 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4b100fb8-4ab3-4514-840a-98e861a2cc11-dns-svc\") pod \"dnsmasq-dns-666b6646f7-fjdkj\" (UID: \"4b100fb8-4ab3-4514-840a-98e861a2cc11\") " pod="openstack/dnsmasq-dns-666b6646f7-fjdkj" Dec 02 18:55:57 crc kubenswrapper[4792]: I1202 18:55:57.890870 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k8j8f\" (UniqueName: \"kubernetes.io/projected/4b100fb8-4ab3-4514-840a-98e861a2cc11-kube-api-access-k8j8f\") pod \"dnsmasq-dns-666b6646f7-fjdkj\" (UID: \"4b100fb8-4ab3-4514-840a-98e861a2cc11\") " pod="openstack/dnsmasq-dns-666b6646f7-fjdkj" Dec 02 18:55:57 crc kubenswrapper[4792]: I1202 18:55:57.892055 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b100fb8-4ab3-4514-840a-98e861a2cc11-config\") pod \"dnsmasq-dns-666b6646f7-fjdkj\" (UID: \"4b100fb8-4ab3-4514-840a-98e861a2cc11\") " pod="openstack/dnsmasq-dns-666b6646f7-fjdkj" Dec 02 18:55:57 crc kubenswrapper[4792]: I1202 18:55:57.892453 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4b100fb8-4ab3-4514-840a-98e861a2cc11-dns-svc\") pod \"dnsmasq-dns-666b6646f7-fjdkj\" (UID: \"4b100fb8-4ab3-4514-840a-98e861a2cc11\") " pod="openstack/dnsmasq-dns-666b6646f7-fjdkj" Dec 02 18:55:57 crc kubenswrapper[4792]: I1202 18:55:57.927916 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k8j8f\" (UniqueName: \"kubernetes.io/projected/4b100fb8-4ab3-4514-840a-98e861a2cc11-kube-api-access-k8j8f\") pod \"dnsmasq-dns-666b6646f7-fjdkj\" (UID: \"4b100fb8-4ab3-4514-840a-98e861a2cc11\") " pod="openstack/dnsmasq-dns-666b6646f7-fjdkj" Dec 02 18:55:57 crc kubenswrapper[4792]: I1202 18:55:57.942151 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-fjdkj" Dec 02 18:55:57 crc kubenswrapper[4792]: I1202 18:55:57.991847 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tnvwn\" (UniqueName: \"kubernetes.io/projected/6f2d96d0-f671-46cd-8e95-162a0773470d-kube-api-access-tnvwn\") pod \"dnsmasq-dns-57d769cc4f-9hhkl\" (UID: \"6f2d96d0-f671-46cd-8e95-162a0773470d\") " pod="openstack/dnsmasq-dns-57d769cc4f-9hhkl" Dec 02 18:55:57 crc kubenswrapper[4792]: I1202 18:55:57.991942 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6f2d96d0-f671-46cd-8e95-162a0773470d-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-9hhkl\" (UID: \"6f2d96d0-f671-46cd-8e95-162a0773470d\") " pod="openstack/dnsmasq-dns-57d769cc4f-9hhkl" Dec 02 18:55:57 crc kubenswrapper[4792]: I1202 18:55:57.992005 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6f2d96d0-f671-46cd-8e95-162a0773470d-config\") pod \"dnsmasq-dns-57d769cc4f-9hhkl\" (UID: \"6f2d96d0-f671-46cd-8e95-162a0773470d\") " pod="openstack/dnsmasq-dns-57d769cc4f-9hhkl" Dec 02 18:55:58 crc kubenswrapper[4792]: I1202 18:55:58.092991 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6f2d96d0-f671-46cd-8e95-162a0773470d-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-9hhkl\" (UID: \"6f2d96d0-f671-46cd-8e95-162a0773470d\") " pod="openstack/dnsmasq-dns-57d769cc4f-9hhkl" Dec 02 18:55:58 crc kubenswrapper[4792]: I1202 18:55:58.093069 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6f2d96d0-f671-46cd-8e95-162a0773470d-config\") pod \"dnsmasq-dns-57d769cc4f-9hhkl\" (UID: \"6f2d96d0-f671-46cd-8e95-162a0773470d\") " pod="openstack/dnsmasq-dns-57d769cc4f-9hhkl" Dec 02 18:55:58 crc kubenswrapper[4792]: I1202 18:55:58.093108 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tnvwn\" (UniqueName: \"kubernetes.io/projected/6f2d96d0-f671-46cd-8e95-162a0773470d-kube-api-access-tnvwn\") pod \"dnsmasq-dns-57d769cc4f-9hhkl\" (UID: \"6f2d96d0-f671-46cd-8e95-162a0773470d\") " pod="openstack/dnsmasq-dns-57d769cc4f-9hhkl" Dec 02 18:55:58 crc kubenswrapper[4792]: I1202 18:55:58.094456 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6f2d96d0-f671-46cd-8e95-162a0773470d-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-9hhkl\" (UID: \"6f2d96d0-f671-46cd-8e95-162a0773470d\") " pod="openstack/dnsmasq-dns-57d769cc4f-9hhkl" Dec 02 18:55:58 crc kubenswrapper[4792]: I1202 18:55:58.095097 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6f2d96d0-f671-46cd-8e95-162a0773470d-config\") pod \"dnsmasq-dns-57d769cc4f-9hhkl\" (UID: \"6f2d96d0-f671-46cd-8e95-162a0773470d\") " pod="openstack/dnsmasq-dns-57d769cc4f-9hhkl" Dec 02 18:55:58 crc kubenswrapper[4792]: I1202 18:55:58.127352 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tnvwn\" (UniqueName: \"kubernetes.io/projected/6f2d96d0-f671-46cd-8e95-162a0773470d-kube-api-access-tnvwn\") pod \"dnsmasq-dns-57d769cc4f-9hhkl\" (UID: \"6f2d96d0-f671-46cd-8e95-162a0773470d\") " 
pod="openstack/dnsmasq-dns-57d769cc4f-9hhkl" Dec 02 18:55:58 crc kubenswrapper[4792]: I1202 18:55:58.200668 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-9hhkl" Dec 02 18:55:58 crc kubenswrapper[4792]: I1202 18:55:58.523857 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-fjdkj"] Dec 02 18:55:58 crc kubenswrapper[4792]: W1202 18:55:58.542964 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4b100fb8_4ab3_4514_840a_98e861a2cc11.slice/crio-96880bc2e2b80026631a59a9b1ddf9942168319b99b264fd616fc409c8e10892 WatchSource:0}: Error finding container 96880bc2e2b80026631a59a9b1ddf9942168319b99b264fd616fc409c8e10892: Status 404 returned error can't find the container with id 96880bc2e2b80026631a59a9b1ddf9942168319b99b264fd616fc409c8e10892 Dec 02 18:55:58 crc kubenswrapper[4792]: I1202 18:55:58.693881 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-9hhkl"] Dec 02 18:55:58 crc kubenswrapper[4792]: I1202 18:55:58.715767 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 02 18:55:58 crc kubenswrapper[4792]: I1202 18:55:58.718090 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 02 18:55:58 crc kubenswrapper[4792]: I1202 18:55:58.720805 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 02 18:55:58 crc kubenswrapper[4792]: I1202 18:55:58.728677 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-cnkhh" Dec 02 18:55:58 crc kubenswrapper[4792]: I1202 18:55:58.728835 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 02 18:55:58 crc kubenswrapper[4792]: I1202 18:55:58.729100 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 02 18:55:58 crc kubenswrapper[4792]: I1202 18:55:58.729288 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 02 18:55:58 crc kubenswrapper[4792]: I1202 18:55:58.729311 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 02 18:55:58 crc kubenswrapper[4792]: I1202 18:55:58.729463 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 02 18:55:58 crc kubenswrapper[4792]: I1202 18:55:58.735653 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 02 18:55:58 crc kubenswrapper[4792]: I1202 18:55:58.913956 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-7dd44334-132b-482c-9476-999deb6f1264\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7dd44334-132b-482c-9476-999deb6f1264\") pod \"rabbitmq-server-0\" (UID: \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\") " pod="openstack/rabbitmq-server-0" Dec 02 18:55:58 crc kubenswrapper[4792]: I1202 18:55:58.913995 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\") " 
pod="openstack/rabbitmq-server-0" Dec 02 18:55:58 crc kubenswrapper[4792]: I1202 18:55:58.914024 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\") " pod="openstack/rabbitmq-server-0" Dec 02 18:55:58 crc kubenswrapper[4792]: I1202 18:55:58.914080 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\") " pod="openstack/rabbitmq-server-0" Dec 02 18:55:58 crc kubenswrapper[4792]: I1202 18:55:58.914136 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\") " pod="openstack/rabbitmq-server-0" Dec 02 18:55:58 crc kubenswrapper[4792]: I1202 18:55:58.914210 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gdbx6\" (UniqueName: \"kubernetes.io/projected/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-kube-api-access-gdbx6\") pod \"rabbitmq-server-0\" (UID: \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\") " pod="openstack/rabbitmq-server-0" Dec 02 18:55:58 crc kubenswrapper[4792]: I1202 18:55:58.914230 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-server-conf\") pod \"rabbitmq-server-0\" (UID: \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\") " pod="openstack/rabbitmq-server-0" Dec 02 18:55:58 crc kubenswrapper[4792]: I1202 18:55:58.914283 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-pod-info\") pod \"rabbitmq-server-0\" (UID: \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\") " pod="openstack/rabbitmq-server-0" Dec 02 18:55:58 crc kubenswrapper[4792]: I1202 18:55:58.914353 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\") " pod="openstack/rabbitmq-server-0" Dec 02 18:55:58 crc kubenswrapper[4792]: I1202 18:55:58.914369 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\") " pod="openstack/rabbitmq-server-0" Dec 02 18:55:58 crc kubenswrapper[4792]: I1202 18:55:58.914391 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-config-data\") pod \"rabbitmq-server-0\" (UID: \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\") " pod="openstack/rabbitmq-server-0" Dec 02 18:55:58 crc kubenswrapper[4792]: 
I1202 18:55:58.981514 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 02 18:55:58 crc kubenswrapper[4792]: I1202 18:55:58.982782 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 02 18:55:58 crc kubenswrapper[4792]: I1202 18:55:58.985418 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 02 18:55:58 crc kubenswrapper[4792]: I1202 18:55:58.986599 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 02 18:55:58 crc kubenswrapper[4792]: I1202 18:55:58.986817 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 02 18:55:58 crc kubenswrapper[4792]: I1202 18:55:58.986913 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 02 18:55:58 crc kubenswrapper[4792]: I1202 18:55:58.987017 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 02 18:55:58 crc kubenswrapper[4792]: I1202 18:55:58.987345 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 02 18:55:58 crc kubenswrapper[4792]: I1202 18:55:58.987502 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-zsftm" Dec 02 18:55:58 crc kubenswrapper[4792]: I1202 18:55:58.990384 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.016142 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\") " pod="openstack/rabbitmq-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.016186 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gdbx6\" (UniqueName: \"kubernetes.io/projected/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-kube-api-access-gdbx6\") pod \"rabbitmq-server-0\" (UID: \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\") " pod="openstack/rabbitmq-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.016213 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-server-conf\") pod \"rabbitmq-server-0\" (UID: \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\") " pod="openstack/rabbitmq-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.016231 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-pod-info\") pod \"rabbitmq-server-0\" (UID: \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\") " pod="openstack/rabbitmq-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.016268 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\") " pod="openstack/rabbitmq-server-0" Dec 02 18:55:59 crc 
kubenswrapper[4792]: I1202 18:55:59.016285 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\") " pod="openstack/rabbitmq-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.016308 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-config-data\") pod \"rabbitmq-server-0\" (UID: \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\") " pod="openstack/rabbitmq-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.016352 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-7dd44334-132b-482c-9476-999deb6f1264\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7dd44334-132b-482c-9476-999deb6f1264\") pod \"rabbitmq-server-0\" (UID: \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\") " pod="openstack/rabbitmq-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.016375 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\") " pod="openstack/rabbitmq-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.016399 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\") " pod="openstack/rabbitmq-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.016418 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\") " pod="openstack/rabbitmq-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.018095 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\") " pod="openstack/rabbitmq-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.018211 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\") " pod="openstack/rabbitmq-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.018742 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-config-data\") pod \"rabbitmq-server-0\" (UID: \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\") " pod="openstack/rabbitmq-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.019739 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-server-conf\") 
pod \"rabbitmq-server-0\" (UID: \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\") " pod="openstack/rabbitmq-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.020421 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\") " pod="openstack/rabbitmq-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.021903 4792 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.021948 4792 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-7dd44334-132b-482c-9476-999deb6f1264\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7dd44334-132b-482c-9476-999deb6f1264\") pod \"rabbitmq-server-0\" (UID: \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/6f7cf48cd4718bf0e3e1786efa8d94ee11134172315a8e6b7e6f42a5f9d770d6/globalmount\"" pod="openstack/rabbitmq-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.024544 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\") " pod="openstack/rabbitmq-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.024797 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-pod-info\") pod \"rabbitmq-server-0\" (UID: \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\") " pod="openstack/rabbitmq-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.024902 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\") " pod="openstack/rabbitmq-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.034773 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gdbx6\" (UniqueName: \"kubernetes.io/projected/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-kube-api-access-gdbx6\") pod \"rabbitmq-server-0\" (UID: \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\") " pod="openstack/rabbitmq-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.053576 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\") " pod="openstack/rabbitmq-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.073715 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-7dd44334-132b-482c-9476-999deb6f1264\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7dd44334-132b-482c-9476-999deb6f1264\") pod \"rabbitmq-server-0\" (UID: \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\") " pod="openstack/rabbitmq-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.118642 4792 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-0fb911ca-2299-49b2-93df-b2fa32ed69d7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0fb911ca-2299-49b2-93df-b2fa32ed69d7\") pod \"rabbitmq-cell1-server-0\" (UID: \"3d0661cf-534a-4951-9e56-7db65fdfd242\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.118710 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/3d0661cf-534a-4951-9e56-7db65fdfd242-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"3d0661cf-534a-4951-9e56-7db65fdfd242\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.118740 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/3d0661cf-534a-4951-9e56-7db65fdfd242-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"3d0661cf-534a-4951-9e56-7db65fdfd242\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.118774 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3d0661cf-534a-4951-9e56-7db65fdfd242-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"3d0661cf-534a-4951-9e56-7db65fdfd242\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.118802 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-756l7\" (UniqueName: \"kubernetes.io/projected/3d0661cf-534a-4951-9e56-7db65fdfd242-kube-api-access-756l7\") pod \"rabbitmq-cell1-server-0\" (UID: \"3d0661cf-534a-4951-9e56-7db65fdfd242\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.118850 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/3d0661cf-534a-4951-9e56-7db65fdfd242-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"3d0661cf-534a-4951-9e56-7db65fdfd242\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.118883 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/3d0661cf-534a-4951-9e56-7db65fdfd242-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"3d0661cf-534a-4951-9e56-7db65fdfd242\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.118904 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/3d0661cf-534a-4951-9e56-7db65fdfd242-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"3d0661cf-534a-4951-9e56-7db65fdfd242\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.118940 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/3d0661cf-534a-4951-9e56-7db65fdfd242-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"3d0661cf-534a-4951-9e56-7db65fdfd242\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 
18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.118965 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/3d0661cf-534a-4951-9e56-7db65fdfd242-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"3d0661cf-534a-4951-9e56-7db65fdfd242\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.118997 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/3d0661cf-534a-4951-9e56-7db65fdfd242-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"3d0661cf-534a-4951-9e56-7db65fdfd242\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.221135 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-0fb911ca-2299-49b2-93df-b2fa32ed69d7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0fb911ca-2299-49b2-93df-b2fa32ed69d7\") pod \"rabbitmq-cell1-server-0\" (UID: \"3d0661cf-534a-4951-9e56-7db65fdfd242\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.221195 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/3d0661cf-534a-4951-9e56-7db65fdfd242-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"3d0661cf-534a-4951-9e56-7db65fdfd242\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.221216 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/3d0661cf-534a-4951-9e56-7db65fdfd242-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"3d0661cf-534a-4951-9e56-7db65fdfd242\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.222038 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3d0661cf-534a-4951-9e56-7db65fdfd242-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"3d0661cf-534a-4951-9e56-7db65fdfd242\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.222059 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-756l7\" (UniqueName: \"kubernetes.io/projected/3d0661cf-534a-4951-9e56-7db65fdfd242-kube-api-access-756l7\") pod \"rabbitmq-cell1-server-0\" (UID: \"3d0661cf-534a-4951-9e56-7db65fdfd242\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.222099 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/3d0661cf-534a-4951-9e56-7db65fdfd242-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"3d0661cf-534a-4951-9e56-7db65fdfd242\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.222126 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/3d0661cf-534a-4951-9e56-7db65fdfd242-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"3d0661cf-534a-4951-9e56-7db65fdfd242\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: 
I1202 18:55:59.222142 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/3d0661cf-534a-4951-9e56-7db65fdfd242-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"3d0661cf-534a-4951-9e56-7db65fdfd242\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.222167 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/3d0661cf-534a-4951-9e56-7db65fdfd242-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"3d0661cf-534a-4951-9e56-7db65fdfd242\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.224442 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/3d0661cf-534a-4951-9e56-7db65fdfd242-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"3d0661cf-534a-4951-9e56-7db65fdfd242\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.224472 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/3d0661cf-534a-4951-9e56-7db65fdfd242-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"3d0661cf-534a-4951-9e56-7db65fdfd242\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.223187 4792 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.224558 4792 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-0fb911ca-2299-49b2-93df-b2fa32ed69d7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0fb911ca-2299-49b2-93df-b2fa32ed69d7\") pod \"rabbitmq-cell1-server-0\" (UID: \"3d0661cf-534a-4951-9e56-7db65fdfd242\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/d70c3530c6f343b08b17089f91da552efef4f7706ca172792211e9d032865e36/globalmount\"" pod="openstack/rabbitmq-cell1-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.222635 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/3d0661cf-534a-4951-9e56-7db65fdfd242-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"3d0661cf-534a-4951-9e56-7db65fdfd242\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.224408 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/3d0661cf-534a-4951-9e56-7db65fdfd242-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"3d0661cf-534a-4951-9e56-7db65fdfd242\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.223387 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3d0661cf-534a-4951-9e56-7db65fdfd242-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"3d0661cf-534a-4951-9e56-7db65fdfd242\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.222956 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/3d0661cf-534a-4951-9e56-7db65fdfd242-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"3d0661cf-534a-4951-9e56-7db65fdfd242\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.223450 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/3d0661cf-534a-4951-9e56-7db65fdfd242-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"3d0661cf-534a-4951-9e56-7db65fdfd242\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.226959 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/3d0661cf-534a-4951-9e56-7db65fdfd242-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"3d0661cf-534a-4951-9e56-7db65fdfd242\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.229485 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/3d0661cf-534a-4951-9e56-7db65fdfd242-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"3d0661cf-534a-4951-9e56-7db65fdfd242\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.229906 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/3d0661cf-534a-4951-9e56-7db65fdfd242-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"3d0661cf-534a-4951-9e56-7db65fdfd242\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.243476 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-756l7\" (UniqueName: \"kubernetes.io/projected/3d0661cf-534a-4951-9e56-7db65fdfd242-kube-api-access-756l7\") pod \"rabbitmq-cell1-server-0\" (UID: \"3d0661cf-534a-4951-9e56-7db65fdfd242\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.246470 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/3d0661cf-534a-4951-9e56-7db65fdfd242-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"3d0661cf-534a-4951-9e56-7db65fdfd242\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.272445 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-0fb911ca-2299-49b2-93df-b2fa32ed69d7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0fb911ca-2299-49b2-93df-b2fa32ed69d7\") pod \"rabbitmq-cell1-server-0\" (UID: \"3d0661cf-534a-4951-9e56-7db65fdfd242\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.317763 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.353364 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.403778 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-9hhkl" event={"ID":"6f2d96d0-f671-46cd-8e95-162a0773470d","Type":"ContainerStarted","Data":"bd8870b20b018b9bd59ee44e9c18561ddc3b7f43b4c7000f2b5622a0f62a7c8b"} Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.409565 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-fjdkj" event={"ID":"4b100fb8-4ab3-4514-840a-98e861a2cc11","Type":"ContainerStarted","Data":"96880bc2e2b80026631a59a9b1ddf9942168319b99b264fd616fc409c8e10892"} Dec 02 18:55:59 crc kubenswrapper[4792]: I1202 18:55:59.790356 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 02 18:55:59 crc kubenswrapper[4792]: W1202 18:55:59.813054 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc7022cd7_3ce3_4e20_b067_5bdb3a39a0ba.slice/crio-e29bba0e752559fb8b4e00d5a165c27578bfb7b76f8e49d6e15bc23a5db6b426 WatchSource:0}: Error finding container e29bba0e752559fb8b4e00d5a165c27578bfb7b76f8e49d6e15bc23a5db6b426: Status 404 returned error can't find the container with id e29bba0e752559fb8b4e00d5a165c27578bfb7b76f8e49d6e15bc23a5db6b426 Dec 02 18:56:00 crc kubenswrapper[4792]: I1202 18:56:00.136596 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 02 18:56:00 crc kubenswrapper[4792]: I1202 18:56:00.428404 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba","Type":"ContainerStarted","Data":"e29bba0e752559fb8b4e00d5a165c27578bfb7b76f8e49d6e15bc23a5db6b426"} Dec 02 18:56:00 crc kubenswrapper[4792]: I1202 18:56:00.429459 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"3d0661cf-534a-4951-9e56-7db65fdfd242","Type":"ContainerStarted","Data":"c24096e3121c825293ace7f19aa00c6dc24a745a43a9ddaa9cd40ee4ae29594d"} Dec 02 18:56:00 crc kubenswrapper[4792]: I1202 18:56:00.570689 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Dec 02 18:56:00 crc kubenswrapper[4792]: I1202 18:56:00.572372 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Dec 02 18:56:00 crc kubenswrapper[4792]: I1202 18:56:00.573985 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Dec 02 18:56:00 crc kubenswrapper[4792]: I1202 18:56:00.575878 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Dec 02 18:56:00 crc kubenswrapper[4792]: I1202 18:56:00.576616 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Dec 02 18:56:00 crc kubenswrapper[4792]: I1202 18:56:00.576716 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-gdsp5" Dec 02 18:56:00 crc kubenswrapper[4792]: I1202 18:56:00.581983 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Dec 02 18:56:00 crc kubenswrapper[4792]: I1202 18:56:00.582774 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 02 18:56:00 crc kubenswrapper[4792]: I1202 18:56:00.764621 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/0c5e3683-f4d1-4f32-8c6d-ecc11415c660-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"0c5e3683-f4d1-4f32-8c6d-ecc11415c660\") " pod="openstack/openstack-galera-0" Dec 02 18:56:00 crc kubenswrapper[4792]: I1202 18:56:00.764718 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/0c5e3683-f4d1-4f32-8c6d-ecc11415c660-config-data-default\") pod \"openstack-galera-0\" (UID: \"0c5e3683-f4d1-4f32-8c6d-ecc11415c660\") " pod="openstack/openstack-galera-0" Dec 02 18:56:00 crc kubenswrapper[4792]: I1202 18:56:00.764743 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2wvpt\" (UniqueName: \"kubernetes.io/projected/0c5e3683-f4d1-4f32-8c6d-ecc11415c660-kube-api-access-2wvpt\") pod \"openstack-galera-0\" (UID: \"0c5e3683-f4d1-4f32-8c6d-ecc11415c660\") " pod="openstack/openstack-galera-0" Dec 02 18:56:00 crc kubenswrapper[4792]: I1202 18:56:00.764768 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0c5e3683-f4d1-4f32-8c6d-ecc11415c660-operator-scripts\") pod \"openstack-galera-0\" (UID: \"0c5e3683-f4d1-4f32-8c6d-ecc11415c660\") " pod="openstack/openstack-galera-0" Dec 02 18:56:00 crc kubenswrapper[4792]: I1202 18:56:00.764793 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/0c5e3683-f4d1-4f32-8c6d-ecc11415c660-kolla-config\") pod \"openstack-galera-0\" (UID: \"0c5e3683-f4d1-4f32-8c6d-ecc11415c660\") " pod="openstack/openstack-galera-0" Dec 02 18:56:00 crc kubenswrapper[4792]: I1202 18:56:00.764809 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/0c5e3683-f4d1-4f32-8c6d-ecc11415c660-config-data-generated\") pod \"openstack-galera-0\" (UID: \"0c5e3683-f4d1-4f32-8c6d-ecc11415c660\") " pod="openstack/openstack-galera-0" Dec 02 18:56:00 crc kubenswrapper[4792]: I1202 18:56:00.764826 4792 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c5e3683-f4d1-4f32-8c6d-ecc11415c660-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"0c5e3683-f4d1-4f32-8c6d-ecc11415c660\") " pod="openstack/openstack-galera-0" Dec 02 18:56:00 crc kubenswrapper[4792]: I1202 18:56:00.764845 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-9ec0855c-8e7d-4e70-921c-988c3d2f2dbd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9ec0855c-8e7d-4e70-921c-988c3d2f2dbd\") pod \"openstack-galera-0\" (UID: \"0c5e3683-f4d1-4f32-8c6d-ecc11415c660\") " pod="openstack/openstack-galera-0" Dec 02 18:56:00 crc kubenswrapper[4792]: I1202 18:56:00.868105 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/0c5e3683-f4d1-4f32-8c6d-ecc11415c660-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"0c5e3683-f4d1-4f32-8c6d-ecc11415c660\") " pod="openstack/openstack-galera-0" Dec 02 18:56:00 crc kubenswrapper[4792]: I1202 18:56:00.868180 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/0c5e3683-f4d1-4f32-8c6d-ecc11415c660-config-data-default\") pod \"openstack-galera-0\" (UID: \"0c5e3683-f4d1-4f32-8c6d-ecc11415c660\") " pod="openstack/openstack-galera-0" Dec 02 18:56:00 crc kubenswrapper[4792]: I1202 18:56:00.868209 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2wvpt\" (UniqueName: \"kubernetes.io/projected/0c5e3683-f4d1-4f32-8c6d-ecc11415c660-kube-api-access-2wvpt\") pod \"openstack-galera-0\" (UID: \"0c5e3683-f4d1-4f32-8c6d-ecc11415c660\") " pod="openstack/openstack-galera-0" Dec 02 18:56:00 crc kubenswrapper[4792]: I1202 18:56:00.868239 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0c5e3683-f4d1-4f32-8c6d-ecc11415c660-operator-scripts\") pod \"openstack-galera-0\" (UID: \"0c5e3683-f4d1-4f32-8c6d-ecc11415c660\") " pod="openstack/openstack-galera-0" Dec 02 18:56:00 crc kubenswrapper[4792]: I1202 18:56:00.868273 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/0c5e3683-f4d1-4f32-8c6d-ecc11415c660-kolla-config\") pod \"openstack-galera-0\" (UID: \"0c5e3683-f4d1-4f32-8c6d-ecc11415c660\") " pod="openstack/openstack-galera-0" Dec 02 18:56:00 crc kubenswrapper[4792]: I1202 18:56:00.868298 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/0c5e3683-f4d1-4f32-8c6d-ecc11415c660-config-data-generated\") pod \"openstack-galera-0\" (UID: \"0c5e3683-f4d1-4f32-8c6d-ecc11415c660\") " pod="openstack/openstack-galera-0" Dec 02 18:56:00 crc kubenswrapper[4792]: I1202 18:56:00.868320 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c5e3683-f4d1-4f32-8c6d-ecc11415c660-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"0c5e3683-f4d1-4f32-8c6d-ecc11415c660\") " pod="openstack/openstack-galera-0" Dec 02 18:56:00 crc kubenswrapper[4792]: I1202 18:56:00.868343 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-9ec0855c-8e7d-4e70-921c-988c3d2f2dbd\" 
(UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9ec0855c-8e7d-4e70-921c-988c3d2f2dbd\") pod \"openstack-galera-0\" (UID: \"0c5e3683-f4d1-4f32-8c6d-ecc11415c660\") " pod="openstack/openstack-galera-0" Dec 02 18:56:00 crc kubenswrapper[4792]: I1202 18:56:00.870064 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/0c5e3683-f4d1-4f32-8c6d-ecc11415c660-kolla-config\") pod \"openstack-galera-0\" (UID: \"0c5e3683-f4d1-4f32-8c6d-ecc11415c660\") " pod="openstack/openstack-galera-0" Dec 02 18:56:00 crc kubenswrapper[4792]: I1202 18:56:00.870366 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/0c5e3683-f4d1-4f32-8c6d-ecc11415c660-config-data-generated\") pod \"openstack-galera-0\" (UID: \"0c5e3683-f4d1-4f32-8c6d-ecc11415c660\") " pod="openstack/openstack-galera-0" Dec 02 18:56:00 crc kubenswrapper[4792]: I1202 18:56:00.872411 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0c5e3683-f4d1-4f32-8c6d-ecc11415c660-operator-scripts\") pod \"openstack-galera-0\" (UID: \"0c5e3683-f4d1-4f32-8c6d-ecc11415c660\") " pod="openstack/openstack-galera-0" Dec 02 18:56:00 crc kubenswrapper[4792]: I1202 18:56:00.878164 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/0c5e3683-f4d1-4f32-8c6d-ecc11415c660-config-data-default\") pod \"openstack-galera-0\" (UID: \"0c5e3683-f4d1-4f32-8c6d-ecc11415c660\") " pod="openstack/openstack-galera-0" Dec 02 18:56:00 crc kubenswrapper[4792]: I1202 18:56:00.904657 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/0c5e3683-f4d1-4f32-8c6d-ecc11415c660-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"0c5e3683-f4d1-4f32-8c6d-ecc11415c660\") " pod="openstack/openstack-galera-0" Dec 02 18:56:00 crc kubenswrapper[4792]: I1202 18:56:00.904725 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c5e3683-f4d1-4f32-8c6d-ecc11415c660-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"0c5e3683-f4d1-4f32-8c6d-ecc11415c660\") " pod="openstack/openstack-galera-0" Dec 02 18:56:00 crc kubenswrapper[4792]: I1202 18:56:00.904803 4792 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 02 18:56:00 crc kubenswrapper[4792]: I1202 18:56:00.904835 4792 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-9ec0855c-8e7d-4e70-921c-988c3d2f2dbd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9ec0855c-8e7d-4e70-921c-988c3d2f2dbd\") pod \"openstack-galera-0\" (UID: \"0c5e3683-f4d1-4f32-8c6d-ecc11415c660\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1194363e120de7918d13b5f7a892882c48af3dc8827ff2ccecac76c9627d6757/globalmount\"" pod="openstack/openstack-galera-0" Dec 02 18:56:00 crc kubenswrapper[4792]: I1202 18:56:00.906624 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2wvpt\" (UniqueName: \"kubernetes.io/projected/0c5e3683-f4d1-4f32-8c6d-ecc11415c660-kube-api-access-2wvpt\") pod \"openstack-galera-0\" (UID: \"0c5e3683-f4d1-4f32-8c6d-ecc11415c660\") " pod="openstack/openstack-galera-0" Dec 02 18:56:00 crc kubenswrapper[4792]: I1202 18:56:00.943132 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-9ec0855c-8e7d-4e70-921c-988c3d2f2dbd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9ec0855c-8e7d-4e70-921c-988c3d2f2dbd\") pod \"openstack-galera-0\" (UID: \"0c5e3683-f4d1-4f32-8c6d-ecc11415c660\") " pod="openstack/openstack-galera-0" Dec 02 18:56:01 crc kubenswrapper[4792]: I1202 18:56:01.240032 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Dec 02 18:56:01 crc kubenswrapper[4792]: I1202 18:56:01.876398 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.003321 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.005571 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.008928 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-z7h2h" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.009807 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.011042 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.011178 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.043969 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.200620 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/91bf1c70-0d0f-49f9-aae7-59865a7abd26-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"91bf1c70-0d0f-49f9-aae7-59865a7abd26\") " pod="openstack/openstack-cell1-galera-0" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.200682 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91bf1c70-0d0f-49f9-aae7-59865a7abd26-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"91bf1c70-0d0f-49f9-aae7-59865a7abd26\") " pod="openstack/openstack-cell1-galera-0" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.200722 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/91bf1c70-0d0f-49f9-aae7-59865a7abd26-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"91bf1c70-0d0f-49f9-aae7-59865a7abd26\") " pod="openstack/openstack-cell1-galera-0" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.200746 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/91bf1c70-0d0f-49f9-aae7-59865a7abd26-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"91bf1c70-0d0f-49f9-aae7-59865a7abd26\") " pod="openstack/openstack-cell1-galera-0" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.200765 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/91bf1c70-0d0f-49f9-aae7-59865a7abd26-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"91bf1c70-0d0f-49f9-aae7-59865a7abd26\") " pod="openstack/openstack-cell1-galera-0" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.200796 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91bf1c70-0d0f-49f9-aae7-59865a7abd26-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"91bf1c70-0d0f-49f9-aae7-59865a7abd26\") " pod="openstack/openstack-cell1-galera-0" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.200823 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sml7m\" 
(UniqueName: \"kubernetes.io/projected/91bf1c70-0d0f-49f9-aae7-59865a7abd26-kube-api-access-sml7m\") pod \"openstack-cell1-galera-0\" (UID: \"91bf1c70-0d0f-49f9-aae7-59865a7abd26\") " pod="openstack/openstack-cell1-galera-0" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.200870 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-86e9867a-ec1c-4190-b52f-7caeb0308802\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-86e9867a-ec1c-4190-b52f-7caeb0308802\") pod \"openstack-cell1-galera-0\" (UID: \"91bf1c70-0d0f-49f9-aae7-59865a7abd26\") " pod="openstack/openstack-cell1-galera-0" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.292300 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.296713 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.301071 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.301245 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.301325 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-8gmht" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.314750 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/91bf1c70-0d0f-49f9-aae7-59865a7abd26-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"91bf1c70-0d0f-49f9-aae7-59865a7abd26\") " pod="openstack/openstack-cell1-galera-0" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.314857 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91bf1c70-0d0f-49f9-aae7-59865a7abd26-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"91bf1c70-0d0f-49f9-aae7-59865a7abd26\") " pod="openstack/openstack-cell1-galera-0" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.314957 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/91bf1c70-0d0f-49f9-aae7-59865a7abd26-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"91bf1c70-0d0f-49f9-aae7-59865a7abd26\") " pod="openstack/openstack-cell1-galera-0" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.314997 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/91bf1c70-0d0f-49f9-aae7-59865a7abd26-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"91bf1c70-0d0f-49f9-aae7-59865a7abd26\") " pod="openstack/openstack-cell1-galera-0" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.315021 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/91bf1c70-0d0f-49f9-aae7-59865a7abd26-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"91bf1c70-0d0f-49f9-aae7-59865a7abd26\") " pod="openstack/openstack-cell1-galera-0" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.315091 4792 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91bf1c70-0d0f-49f9-aae7-59865a7abd26-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"91bf1c70-0d0f-49f9-aae7-59865a7abd26\") " pod="openstack/openstack-cell1-galera-0" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.315153 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sml7m\" (UniqueName: \"kubernetes.io/projected/91bf1c70-0d0f-49f9-aae7-59865a7abd26-kube-api-access-sml7m\") pod \"openstack-cell1-galera-0\" (UID: \"91bf1c70-0d0f-49f9-aae7-59865a7abd26\") " pod="openstack/openstack-cell1-galera-0" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.315247 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-86e9867a-ec1c-4190-b52f-7caeb0308802\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-86e9867a-ec1c-4190-b52f-7caeb0308802\") pod \"openstack-cell1-galera-0\" (UID: \"91bf1c70-0d0f-49f9-aae7-59865a7abd26\") " pod="openstack/openstack-cell1-galera-0" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.317690 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.319647 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/91bf1c70-0d0f-49f9-aae7-59865a7abd26-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"91bf1c70-0d0f-49f9-aae7-59865a7abd26\") " pod="openstack/openstack-cell1-galera-0" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.320965 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91bf1c70-0d0f-49f9-aae7-59865a7abd26-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"91bf1c70-0d0f-49f9-aae7-59865a7abd26\") " pod="openstack/openstack-cell1-galera-0" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.323188 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/91bf1c70-0d0f-49f9-aae7-59865a7abd26-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"91bf1c70-0d0f-49f9-aae7-59865a7abd26\") " pod="openstack/openstack-cell1-galera-0" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.325771 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/91bf1c70-0d0f-49f9-aae7-59865a7abd26-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"91bf1c70-0d0f-49f9-aae7-59865a7abd26\") " pod="openstack/openstack-cell1-galera-0" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.327741 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/91bf1c70-0d0f-49f9-aae7-59865a7abd26-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"91bf1c70-0d0f-49f9-aae7-59865a7abd26\") " pod="openstack/openstack-cell1-galera-0" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.345859 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91bf1c70-0d0f-49f9-aae7-59865a7abd26-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"91bf1c70-0d0f-49f9-aae7-59865a7abd26\") " pod="openstack/openstack-cell1-galera-0" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.350309 4792 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sml7m\" (UniqueName: \"kubernetes.io/projected/91bf1c70-0d0f-49f9-aae7-59865a7abd26-kube-api-access-sml7m\") pod \"openstack-cell1-galera-0\" (UID: \"91bf1c70-0d0f-49f9-aae7-59865a7abd26\") " pod="openstack/openstack-cell1-galera-0" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.350435 4792 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.350465 4792 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-86e9867a-ec1c-4190-b52f-7caeb0308802\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-86e9867a-ec1c-4190-b52f-7caeb0308802\") pod \"openstack-cell1-galera-0\" (UID: \"91bf1c70-0d0f-49f9-aae7-59865a7abd26\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/2478c573a57b176d1f8dc290c75cf1306b84865a7746e7f2434aaa60ac0a7e9f/globalmount\"" pod="openstack/openstack-cell1-galera-0" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.418883 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/2951974e-17c4-4cf6-b244-6efc7a6fc742-memcached-tls-certs\") pod \"memcached-0\" (UID: \"2951974e-17c4-4cf6-b244-6efc7a6fc742\") " pod="openstack/memcached-0" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.418953 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2951974e-17c4-4cf6-b244-6efc7a6fc742-config-data\") pod \"memcached-0\" (UID: \"2951974e-17c4-4cf6-b244-6efc7a6fc742\") " pod="openstack/memcached-0" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.419013 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2951974e-17c4-4cf6-b244-6efc7a6fc742-combined-ca-bundle\") pod \"memcached-0\" (UID: \"2951974e-17c4-4cf6-b244-6efc7a6fc742\") " pod="openstack/memcached-0" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.419032 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2951974e-17c4-4cf6-b244-6efc7a6fc742-kolla-config\") pod \"memcached-0\" (UID: \"2951974e-17c4-4cf6-b244-6efc7a6fc742\") " pod="openstack/memcached-0" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.419063 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6plz\" (UniqueName: \"kubernetes.io/projected/2951974e-17c4-4cf6-b244-6efc7a6fc742-kube-api-access-m6plz\") pod \"memcached-0\" (UID: \"2951974e-17c4-4cf6-b244-6efc7a6fc742\") " pod="openstack/memcached-0" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.429577 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-86e9867a-ec1c-4190-b52f-7caeb0308802\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-86e9867a-ec1c-4190-b52f-7caeb0308802\") pod \"openstack-cell1-galera-0\" (UID: \"91bf1c70-0d0f-49f9-aae7-59865a7abd26\") " pod="openstack/openstack-cell1-galera-0" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.520541 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2951974e-17c4-4cf6-b244-6efc7a6fc742-combined-ca-bundle\") pod \"memcached-0\" (UID: \"2951974e-17c4-4cf6-b244-6efc7a6fc742\") " pod="openstack/memcached-0" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.520586 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2951974e-17c4-4cf6-b244-6efc7a6fc742-kolla-config\") pod \"memcached-0\" (UID: \"2951974e-17c4-4cf6-b244-6efc7a6fc742\") " pod="openstack/memcached-0" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.520639 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6plz\" (UniqueName: \"kubernetes.io/projected/2951974e-17c4-4cf6-b244-6efc7a6fc742-kube-api-access-m6plz\") pod \"memcached-0\" (UID: \"2951974e-17c4-4cf6-b244-6efc7a6fc742\") " pod="openstack/memcached-0" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.520706 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/2951974e-17c4-4cf6-b244-6efc7a6fc742-memcached-tls-certs\") pod \"memcached-0\" (UID: \"2951974e-17c4-4cf6-b244-6efc7a6fc742\") " pod="openstack/memcached-0" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.520751 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2951974e-17c4-4cf6-b244-6efc7a6fc742-config-data\") pod \"memcached-0\" (UID: \"2951974e-17c4-4cf6-b244-6efc7a6fc742\") " pod="openstack/memcached-0" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.521800 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2951974e-17c4-4cf6-b244-6efc7a6fc742-config-data\") pod \"memcached-0\" (UID: \"2951974e-17c4-4cf6-b244-6efc7a6fc742\") " pod="openstack/memcached-0" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.522162 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2951974e-17c4-4cf6-b244-6efc7a6fc742-kolla-config\") pod \"memcached-0\" (UID: \"2951974e-17c4-4cf6-b244-6efc7a6fc742\") " pod="openstack/memcached-0" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.530026 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2951974e-17c4-4cf6-b244-6efc7a6fc742-combined-ca-bundle\") pod \"memcached-0\" (UID: \"2951974e-17c4-4cf6-b244-6efc7a6fc742\") " pod="openstack/memcached-0" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.539291 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m6plz\" (UniqueName: \"kubernetes.io/projected/2951974e-17c4-4cf6-b244-6efc7a6fc742-kube-api-access-m6plz\") pod \"memcached-0\" (UID: \"2951974e-17c4-4cf6-b244-6efc7a6fc742\") " pod="openstack/memcached-0" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.539338 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/2951974e-17c4-4cf6-b244-6efc7a6fc742-memcached-tls-certs\") pod \"memcached-0\" (UID: \"2951974e-17c4-4cf6-b244-6efc7a6fc742\") " pod="openstack/memcached-0" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.644399 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 02 18:56:02 crc kubenswrapper[4792]: I1202 18:56:02.745945 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 02 18:56:03 crc kubenswrapper[4792]: I1202 18:56:03.883117 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 02 18:56:03 crc kubenswrapper[4792]: I1202 18:56:03.884557 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 02 18:56:03 crc kubenswrapper[4792]: I1202 18:56:03.892944 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-2lc7q" Dec 02 18:56:03 crc kubenswrapper[4792]: I1202 18:56:03.898588 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 02 18:56:04 crc kubenswrapper[4792]: I1202 18:56:04.069996 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lqrdh\" (UniqueName: \"kubernetes.io/projected/3a9324ab-6fb7-4057-bf68-e74e0907aa80-kube-api-access-lqrdh\") pod \"kube-state-metrics-0\" (UID: \"3a9324ab-6fb7-4057-bf68-e74e0907aa80\") " pod="openstack/kube-state-metrics-0" Dec 02 18:56:04 crc kubenswrapper[4792]: I1202 18:56:04.171469 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lqrdh\" (UniqueName: \"kubernetes.io/projected/3a9324ab-6fb7-4057-bf68-e74e0907aa80-kube-api-access-lqrdh\") pod \"kube-state-metrics-0\" (UID: \"3a9324ab-6fb7-4057-bf68-e74e0907aa80\") " pod="openstack/kube-state-metrics-0" Dec 02 18:56:04 crc kubenswrapper[4792]: I1202 18:56:04.234761 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lqrdh\" (UniqueName: \"kubernetes.io/projected/3a9324ab-6fb7-4057-bf68-e74e0907aa80-kube-api-access-lqrdh\") pod \"kube-state-metrics-0\" (UID: \"3a9324ab-6fb7-4057-bf68-e74e0907aa80\") " pod="openstack/kube-state-metrics-0" Dec 02 18:56:04 crc kubenswrapper[4792]: I1202 18:56:04.510421 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 02 18:56:04 crc kubenswrapper[4792]: I1202 18:56:04.598424 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/alertmanager-metric-storage-0"] Dec 02 18:56:04 crc kubenswrapper[4792]: I1202 18:56:04.600218 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/alertmanager-metric-storage-0" Dec 02 18:56:04 crc kubenswrapper[4792]: I1202 18:56:04.605516 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-web-config" Dec 02 18:56:04 crc kubenswrapper[4792]: I1202 18:56:04.605659 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-cluster-tls-config" Dec 02 18:56:04 crc kubenswrapper[4792]: I1202 18:56:04.605792 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-generated" Dec 02 18:56:04 crc kubenswrapper[4792]: I1202 18:56:04.605838 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-tls-assets-0" Dec 02 18:56:04 crc kubenswrapper[4792]: I1202 18:56:04.605916 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-alertmanager-dockercfg-sv6nw" Dec 02 18:56:04 crc kubenswrapper[4792]: I1202 18:56:04.613265 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"] Dec 02 18:56:04 crc kubenswrapper[4792]: I1202 18:56:04.681721 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/26d86aa8-79aa-4d9b-ac24-155924920219-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"26d86aa8-79aa-4d9b-ac24-155924920219\") " pod="openstack/alertmanager-metric-storage-0" Dec 02 18:56:04 crc kubenswrapper[4792]: I1202 18:56:04.681808 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/26d86aa8-79aa-4d9b-ac24-155924920219-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"26d86aa8-79aa-4d9b-ac24-155924920219\") " pod="openstack/alertmanager-metric-storage-0" Dec 02 18:56:04 crc kubenswrapper[4792]: I1202 18:56:04.682185 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/26d86aa8-79aa-4d9b-ac24-155924920219-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"26d86aa8-79aa-4d9b-ac24-155924920219\") " pod="openstack/alertmanager-metric-storage-0" Dec 02 18:56:04 crc kubenswrapper[4792]: I1202 18:56:04.682234 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/26d86aa8-79aa-4d9b-ac24-155924920219-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"26d86aa8-79aa-4d9b-ac24-155924920219\") " pod="openstack/alertmanager-metric-storage-0" Dec 02 18:56:04 crc kubenswrapper[4792]: I1202 18:56:04.682261 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/26d86aa8-79aa-4d9b-ac24-155924920219-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"26d86aa8-79aa-4d9b-ac24-155924920219\") " pod="openstack/alertmanager-metric-storage-0" Dec 02 18:56:04 crc kubenswrapper[4792]: I1202 18:56:04.682341 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/26d86aa8-79aa-4d9b-ac24-155924920219-config-out\") pod \"alertmanager-metric-storage-0\" (UID: 
\"26d86aa8-79aa-4d9b-ac24-155924920219\") " pod="openstack/alertmanager-metric-storage-0" Dec 02 18:56:04 crc kubenswrapper[4792]: I1202 18:56:04.682360 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ww2vq\" (UniqueName: \"kubernetes.io/projected/26d86aa8-79aa-4d9b-ac24-155924920219-kube-api-access-ww2vq\") pod \"alertmanager-metric-storage-0\" (UID: \"26d86aa8-79aa-4d9b-ac24-155924920219\") " pod="openstack/alertmanager-metric-storage-0" Dec 02 18:56:04 crc kubenswrapper[4792]: I1202 18:56:04.783222 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/26d86aa8-79aa-4d9b-ac24-155924920219-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"26d86aa8-79aa-4d9b-ac24-155924920219\") " pod="openstack/alertmanager-metric-storage-0" Dec 02 18:56:04 crc kubenswrapper[4792]: I1202 18:56:04.783276 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/26d86aa8-79aa-4d9b-ac24-155924920219-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"26d86aa8-79aa-4d9b-ac24-155924920219\") " pod="openstack/alertmanager-metric-storage-0" Dec 02 18:56:04 crc kubenswrapper[4792]: I1202 18:56:04.783303 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/26d86aa8-79aa-4d9b-ac24-155924920219-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"26d86aa8-79aa-4d9b-ac24-155924920219\") " pod="openstack/alertmanager-metric-storage-0" Dec 02 18:56:04 crc kubenswrapper[4792]: I1202 18:56:04.783323 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/26d86aa8-79aa-4d9b-ac24-155924920219-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"26d86aa8-79aa-4d9b-ac24-155924920219\") " pod="openstack/alertmanager-metric-storage-0" Dec 02 18:56:04 crc kubenswrapper[4792]: I1202 18:56:04.783340 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/26d86aa8-79aa-4d9b-ac24-155924920219-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"26d86aa8-79aa-4d9b-ac24-155924920219\") " pod="openstack/alertmanager-metric-storage-0" Dec 02 18:56:04 crc kubenswrapper[4792]: I1202 18:56:04.783375 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ww2vq\" (UniqueName: \"kubernetes.io/projected/26d86aa8-79aa-4d9b-ac24-155924920219-kube-api-access-ww2vq\") pod \"alertmanager-metric-storage-0\" (UID: \"26d86aa8-79aa-4d9b-ac24-155924920219\") " pod="openstack/alertmanager-metric-storage-0" Dec 02 18:56:04 crc kubenswrapper[4792]: I1202 18:56:04.783396 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/26d86aa8-79aa-4d9b-ac24-155924920219-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"26d86aa8-79aa-4d9b-ac24-155924920219\") " pod="openstack/alertmanager-metric-storage-0" Dec 02 18:56:04 crc kubenswrapper[4792]: I1202 18:56:04.784974 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/26d86aa8-79aa-4d9b-ac24-155924920219-alertmanager-metric-storage-db\") pod 
\"alertmanager-metric-storage-0\" (UID: \"26d86aa8-79aa-4d9b-ac24-155924920219\") " pod="openstack/alertmanager-metric-storage-0" Dec 02 18:56:04 crc kubenswrapper[4792]: I1202 18:56:04.786786 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/26d86aa8-79aa-4d9b-ac24-155924920219-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"26d86aa8-79aa-4d9b-ac24-155924920219\") " pod="openstack/alertmanager-metric-storage-0" Dec 02 18:56:04 crc kubenswrapper[4792]: I1202 18:56:04.787192 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/26d86aa8-79aa-4d9b-ac24-155924920219-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"26d86aa8-79aa-4d9b-ac24-155924920219\") " pod="openstack/alertmanager-metric-storage-0" Dec 02 18:56:04 crc kubenswrapper[4792]: I1202 18:56:04.790507 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/26d86aa8-79aa-4d9b-ac24-155924920219-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"26d86aa8-79aa-4d9b-ac24-155924920219\") " pod="openstack/alertmanager-metric-storage-0" Dec 02 18:56:04 crc kubenswrapper[4792]: I1202 18:56:04.792045 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/26d86aa8-79aa-4d9b-ac24-155924920219-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"26d86aa8-79aa-4d9b-ac24-155924920219\") " pod="openstack/alertmanager-metric-storage-0" Dec 02 18:56:04 crc kubenswrapper[4792]: I1202 18:56:04.803150 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ww2vq\" (UniqueName: \"kubernetes.io/projected/26d86aa8-79aa-4d9b-ac24-155924920219-kube-api-access-ww2vq\") pod \"alertmanager-metric-storage-0\" (UID: \"26d86aa8-79aa-4d9b-ac24-155924920219\") " pod="openstack/alertmanager-metric-storage-0" Dec 02 18:56:04 crc kubenswrapper[4792]: I1202 18:56:04.807361 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/26d86aa8-79aa-4d9b-ac24-155924920219-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"26d86aa8-79aa-4d9b-ac24-155924920219\") " pod="openstack/alertmanager-metric-storage-0" Dec 02 18:56:04 crc kubenswrapper[4792]: I1202 18:56:04.934688 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/alertmanager-metric-storage-0" Dec 02 18:56:05 crc kubenswrapper[4792]: I1202 18:56:05.312127 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 02 18:56:05 crc kubenswrapper[4792]: I1202 18:56:05.314365 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 02 18:56:05 crc kubenswrapper[4792]: I1202 18:56:05.319068 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-db9tf" Dec 02 18:56:05 crc kubenswrapper[4792]: I1202 18:56:05.319179 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Dec 02 18:56:05 crc kubenswrapper[4792]: I1202 18:56:05.319252 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Dec 02 18:56:05 crc kubenswrapper[4792]: I1202 18:56:05.319447 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Dec 02 18:56:05 crc kubenswrapper[4792]: I1202 18:56:05.319529 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Dec 02 18:56:05 crc kubenswrapper[4792]: I1202 18:56:05.319741 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Dec 02 18:56:05 crc kubenswrapper[4792]: I1202 18:56:05.325869 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 02 18:56:05 crc kubenswrapper[4792]: I1202 18:56:05.500125 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/357feab9-6738-4c52-8478-0763a304671f-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"357feab9-6738-4c52-8478-0763a304671f\") " pod="openstack/prometheus-metric-storage-0" Dec 02 18:56:05 crc kubenswrapper[4792]: I1202 18:56:05.500202 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/357feab9-6738-4c52-8478-0763a304671f-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"357feab9-6738-4c52-8478-0763a304671f\") " pod="openstack/prometheus-metric-storage-0" Dec 02 18:56:05 crc kubenswrapper[4792]: I1202 18:56:05.500229 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/357feab9-6738-4c52-8478-0763a304671f-config\") pod \"prometheus-metric-storage-0\" (UID: \"357feab9-6738-4c52-8478-0763a304671f\") " pod="openstack/prometheus-metric-storage-0" Dec 02 18:56:05 crc kubenswrapper[4792]: I1202 18:56:05.500400 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-26gl8\" (UniqueName: \"kubernetes.io/projected/357feab9-6738-4c52-8478-0763a304671f-kube-api-access-26gl8\") pod \"prometheus-metric-storage-0\" (UID: \"357feab9-6738-4c52-8478-0763a304671f\") " pod="openstack/prometheus-metric-storage-0" Dec 02 18:56:05 crc kubenswrapper[4792]: I1202 18:56:05.500435 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-f0561b14-c8c2-41be-a503-99f9bcbd40cd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f0561b14-c8c2-41be-a503-99f9bcbd40cd\") pod \"prometheus-metric-storage-0\" (UID: \"357feab9-6738-4c52-8478-0763a304671f\") " pod="openstack/prometheus-metric-storage-0" Dec 02 18:56:05 crc kubenswrapper[4792]: I1202 18:56:05.500458 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-out\" (UniqueName: \"kubernetes.io/empty-dir/357feab9-6738-4c52-8478-0763a304671f-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"357feab9-6738-4c52-8478-0763a304671f\") " pod="openstack/prometheus-metric-storage-0" Dec 02 18:56:05 crc kubenswrapper[4792]: I1202 18:56:05.500492 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/357feab9-6738-4c52-8478-0763a304671f-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"357feab9-6738-4c52-8478-0763a304671f\") " pod="openstack/prometheus-metric-storage-0" Dec 02 18:56:05 crc kubenswrapper[4792]: I1202 18:56:05.500547 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/357feab9-6738-4c52-8478-0763a304671f-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"357feab9-6738-4c52-8478-0763a304671f\") " pod="openstack/prometheus-metric-storage-0" Dec 02 18:56:05 crc kubenswrapper[4792]: I1202 18:56:05.601408 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/357feab9-6738-4c52-8478-0763a304671f-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"357feab9-6738-4c52-8478-0763a304671f\") " pod="openstack/prometheus-metric-storage-0" Dec 02 18:56:05 crc kubenswrapper[4792]: I1202 18:56:05.601465 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/357feab9-6738-4c52-8478-0763a304671f-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"357feab9-6738-4c52-8478-0763a304671f\") " pod="openstack/prometheus-metric-storage-0" Dec 02 18:56:05 crc kubenswrapper[4792]: I1202 18:56:05.601484 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/357feab9-6738-4c52-8478-0763a304671f-config\") pod \"prometheus-metric-storage-0\" (UID: \"357feab9-6738-4c52-8478-0763a304671f\") " pod="openstack/prometheus-metric-storage-0" Dec 02 18:56:05 crc kubenswrapper[4792]: I1202 18:56:05.601553 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-26gl8\" (UniqueName: \"kubernetes.io/projected/357feab9-6738-4c52-8478-0763a304671f-kube-api-access-26gl8\") pod \"prometheus-metric-storage-0\" (UID: \"357feab9-6738-4c52-8478-0763a304671f\") " pod="openstack/prometheus-metric-storage-0" Dec 02 18:56:05 crc kubenswrapper[4792]: I1202 18:56:05.601575 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-f0561b14-c8c2-41be-a503-99f9bcbd40cd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f0561b14-c8c2-41be-a503-99f9bcbd40cd\") pod \"prometheus-metric-storage-0\" (UID: \"357feab9-6738-4c52-8478-0763a304671f\") " pod="openstack/prometheus-metric-storage-0" Dec 02 18:56:05 crc kubenswrapper[4792]: I1202 18:56:05.601593 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/357feab9-6738-4c52-8478-0763a304671f-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"357feab9-6738-4c52-8478-0763a304671f\") " pod="openstack/prometheus-metric-storage-0" Dec 02 18:56:05 crc kubenswrapper[4792]: I1202 18:56:05.601612 4792 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/357feab9-6738-4c52-8478-0763a304671f-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"357feab9-6738-4c52-8478-0763a304671f\") " pod="openstack/prometheus-metric-storage-0" Dec 02 18:56:05 crc kubenswrapper[4792]: I1202 18:56:05.601639 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/357feab9-6738-4c52-8478-0763a304671f-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"357feab9-6738-4c52-8478-0763a304671f\") " pod="openstack/prometheus-metric-storage-0" Dec 02 18:56:05 crc kubenswrapper[4792]: I1202 18:56:05.603568 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/357feab9-6738-4c52-8478-0763a304671f-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"357feab9-6738-4c52-8478-0763a304671f\") " pod="openstack/prometheus-metric-storage-0" Dec 02 18:56:05 crc kubenswrapper[4792]: I1202 18:56:05.605122 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/357feab9-6738-4c52-8478-0763a304671f-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"357feab9-6738-4c52-8478-0763a304671f\") " pod="openstack/prometheus-metric-storage-0" Dec 02 18:56:05 crc kubenswrapper[4792]: I1202 18:56:05.607006 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/357feab9-6738-4c52-8478-0763a304671f-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"357feab9-6738-4c52-8478-0763a304671f\") " pod="openstack/prometheus-metric-storage-0" Dec 02 18:56:05 crc kubenswrapper[4792]: I1202 18:56:05.608345 4792 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 02 18:56:05 crc kubenswrapper[4792]: I1202 18:56:05.608389 4792 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-f0561b14-c8c2-41be-a503-99f9bcbd40cd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f0561b14-c8c2-41be-a503-99f9bcbd40cd\") pod \"prometheus-metric-storage-0\" (UID: \"357feab9-6738-4c52-8478-0763a304671f\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/14a22ce14f64bd088e22b4e9468d294093eded8b3f54a4c5617279316681f14a/globalmount\"" pod="openstack/prometheus-metric-storage-0" Dec 02 18:56:05 crc kubenswrapper[4792]: I1202 18:56:05.611689 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/357feab9-6738-4c52-8478-0763a304671f-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"357feab9-6738-4c52-8478-0763a304671f\") " pod="openstack/prometheus-metric-storage-0" Dec 02 18:56:05 crc kubenswrapper[4792]: I1202 18:56:05.615810 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/357feab9-6738-4c52-8478-0763a304671f-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"357feab9-6738-4c52-8478-0763a304671f\") " pod="openstack/prometheus-metric-storage-0" Dec 02 18:56:05 crc kubenswrapper[4792]: I1202 18:56:05.620968 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-26gl8\" (UniqueName: \"kubernetes.io/projected/357feab9-6738-4c52-8478-0763a304671f-kube-api-access-26gl8\") pod \"prometheus-metric-storage-0\" (UID: \"357feab9-6738-4c52-8478-0763a304671f\") " pod="openstack/prometheus-metric-storage-0" Dec 02 18:56:05 crc kubenswrapper[4792]: I1202 18:56:05.626423 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/357feab9-6738-4c52-8478-0763a304671f-config\") pod \"prometheus-metric-storage-0\" (UID: \"357feab9-6738-4c52-8478-0763a304671f\") " pod="openstack/prometheus-metric-storage-0" Dec 02 18:56:05 crc kubenswrapper[4792]: I1202 18:56:05.657570 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-f0561b14-c8c2-41be-a503-99f9bcbd40cd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f0561b14-c8c2-41be-a503-99f9bcbd40cd\") pod \"prometheus-metric-storage-0\" (UID: \"357feab9-6738-4c52-8478-0763a304671f\") " pod="openstack/prometheus-metric-storage-0" Dec 02 18:56:05 crc kubenswrapper[4792]: I1202 18:56:05.941042 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 02 18:56:07 crc kubenswrapper[4792]: I1202 18:56:07.904825 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 02 18:56:07 crc kubenswrapper[4792]: I1202 18:56:07.908440 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 02 18:56:07 crc kubenswrapper[4792]: I1202 18:56:07.913416 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Dec 02 18:56:07 crc kubenswrapper[4792]: I1202 18:56:07.913638 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-j6r8m" Dec 02 18:56:07 crc kubenswrapper[4792]: I1202 18:56:07.913796 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Dec 02 18:56:07 crc kubenswrapper[4792]: I1202 18:56:07.913961 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Dec 02 18:56:07 crc kubenswrapper[4792]: I1202 18:56:07.914098 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Dec 02 18:56:07 crc kubenswrapper[4792]: I1202 18:56:07.947307 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.049913 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qnsvt\" (UniqueName: \"kubernetes.io/projected/db787f15-5115-48a8-9443-93f5da555d2a-kube-api-access-qnsvt\") pod \"ovsdbserver-nb-0\" (UID: \"db787f15-5115-48a8-9443-93f5da555d2a\") " pod="openstack/ovsdbserver-nb-0" Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.055787 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db787f15-5115-48a8-9443-93f5da555d2a-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"db787f15-5115-48a8-9443-93f5da555d2a\") " pod="openstack/ovsdbserver-nb-0" Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.055823 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/db787f15-5115-48a8-9443-93f5da555d2a-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"db787f15-5115-48a8-9443-93f5da555d2a\") " pod="openstack/ovsdbserver-nb-0" Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.055985 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/db787f15-5115-48a8-9443-93f5da555d2a-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"db787f15-5115-48a8-9443-93f5da555d2a\") " pod="openstack/ovsdbserver-nb-0" Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.056036 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-a0e5ef79-b7f0-485b-837a-77b9cc1068ef\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a0e5ef79-b7f0-485b-837a-77b9cc1068ef\") pod \"ovsdbserver-nb-0\" (UID: \"db787f15-5115-48a8-9443-93f5da555d2a\") " pod="openstack/ovsdbserver-nb-0" Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.056068 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/db787f15-5115-48a8-9443-93f5da555d2a-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"db787f15-5115-48a8-9443-93f5da555d2a\") " pod="openstack/ovsdbserver-nb-0" Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.056105 4792 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db787f15-5115-48a8-9443-93f5da555d2a-config\") pod \"ovsdbserver-nb-0\" (UID: \"db787f15-5115-48a8-9443-93f5da555d2a\") " pod="openstack/ovsdbserver-nb-0" Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.056177 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/db787f15-5115-48a8-9443-93f5da555d2a-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"db787f15-5115-48a8-9443-93f5da555d2a\") " pod="openstack/ovsdbserver-nb-0" Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.157149 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/db787f15-5115-48a8-9443-93f5da555d2a-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"db787f15-5115-48a8-9443-93f5da555d2a\") " pod="openstack/ovsdbserver-nb-0" Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.157219 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qnsvt\" (UniqueName: \"kubernetes.io/projected/db787f15-5115-48a8-9443-93f5da555d2a-kube-api-access-qnsvt\") pod \"ovsdbserver-nb-0\" (UID: \"db787f15-5115-48a8-9443-93f5da555d2a\") " pod="openstack/ovsdbserver-nb-0" Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.157241 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db787f15-5115-48a8-9443-93f5da555d2a-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"db787f15-5115-48a8-9443-93f5da555d2a\") " pod="openstack/ovsdbserver-nb-0" Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.157259 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/db787f15-5115-48a8-9443-93f5da555d2a-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"db787f15-5115-48a8-9443-93f5da555d2a\") " pod="openstack/ovsdbserver-nb-0" Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.157319 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/db787f15-5115-48a8-9443-93f5da555d2a-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"db787f15-5115-48a8-9443-93f5da555d2a\") " pod="openstack/ovsdbserver-nb-0" Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.157345 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-a0e5ef79-b7f0-485b-837a-77b9cc1068ef\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a0e5ef79-b7f0-485b-837a-77b9cc1068ef\") pod \"ovsdbserver-nb-0\" (UID: \"db787f15-5115-48a8-9443-93f5da555d2a\") " pod="openstack/ovsdbserver-nb-0" Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.157363 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/db787f15-5115-48a8-9443-93f5da555d2a-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"db787f15-5115-48a8-9443-93f5da555d2a\") " pod="openstack/ovsdbserver-nb-0" Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.157389 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db787f15-5115-48a8-9443-93f5da555d2a-config\") pod 
\"ovsdbserver-nb-0\" (UID: \"db787f15-5115-48a8-9443-93f5da555d2a\") " pod="openstack/ovsdbserver-nb-0" Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.158358 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db787f15-5115-48a8-9443-93f5da555d2a-config\") pod \"ovsdbserver-nb-0\" (UID: \"db787f15-5115-48a8-9443-93f5da555d2a\") " pod="openstack/ovsdbserver-nb-0" Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.159211 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/db787f15-5115-48a8-9443-93f5da555d2a-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"db787f15-5115-48a8-9443-93f5da555d2a\") " pod="openstack/ovsdbserver-nb-0" Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.162936 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/db787f15-5115-48a8-9443-93f5da555d2a-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"db787f15-5115-48a8-9443-93f5da555d2a\") " pod="openstack/ovsdbserver-nb-0" Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.167082 4792 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.167115 4792 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-a0e5ef79-b7f0-485b-837a-77b9cc1068ef\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a0e5ef79-b7f0-485b-837a-77b9cc1068ef\") pod \"ovsdbserver-nb-0\" (UID: \"db787f15-5115-48a8-9443-93f5da555d2a\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/8d64c6a030de4c7b7dcf47af26417749610f1da9491876f2c6bc45d7c327d9d5/globalmount\"" pod="openstack/ovsdbserver-nb-0" Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.167283 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/db787f15-5115-48a8-9443-93f5da555d2a-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"db787f15-5115-48a8-9443-93f5da555d2a\") " pod="openstack/ovsdbserver-nb-0" Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.167310 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db787f15-5115-48a8-9443-93f5da555d2a-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"db787f15-5115-48a8-9443-93f5da555d2a\") " pod="openstack/ovsdbserver-nb-0" Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.174552 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/db787f15-5115-48a8-9443-93f5da555d2a-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"db787f15-5115-48a8-9443-93f5da555d2a\") " pod="openstack/ovsdbserver-nb-0" Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.180232 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qnsvt\" (UniqueName: \"kubernetes.io/projected/db787f15-5115-48a8-9443-93f5da555d2a-kube-api-access-qnsvt\") pod \"ovsdbserver-nb-0\" (UID: \"db787f15-5115-48a8-9443-93f5da555d2a\") " pod="openstack/ovsdbserver-nb-0" Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.199219 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"pvc-a0e5ef79-b7f0-485b-837a-77b9cc1068ef\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a0e5ef79-b7f0-485b-837a-77b9cc1068ef\") pod \"ovsdbserver-nb-0\" (UID: \"db787f15-5115-48a8-9443-93f5da555d2a\") " pod="openstack/ovsdbserver-nb-0" Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.241359 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.564725 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"0c5e3683-f4d1-4f32-8c6d-ecc11415c660","Type":"ContainerStarted","Data":"e02830da499c224b0ba8565db4033ed706fe44ea418cdf576b4aad4478d491d7"} Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.767100 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-s44lp"] Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.768486 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-s44lp" Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.772861 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-qqz29" Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.777683 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.778582 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.780573 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-gpsrm"] Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.782288 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-gpsrm" Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.794114 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-s44lp"] Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.812150 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-gpsrm"] Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.880624 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6tmmf\" (UniqueName: \"kubernetes.io/projected/2a5ad51d-6996-42c0-b156-600ff9dc7782-kube-api-access-6tmmf\") pod \"ovn-controller-s44lp\" (UID: \"2a5ad51d-6996-42c0-b156-600ff9dc7782\") " pod="openstack/ovn-controller-s44lp" Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.880665 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2a5ad51d-6996-42c0-b156-600ff9dc7782-scripts\") pod \"ovn-controller-s44lp\" (UID: \"2a5ad51d-6996-42c0-b156-600ff9dc7782\") " pod="openstack/ovn-controller-s44lp" Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.880692 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a5ad51d-6996-42c0-b156-600ff9dc7782-combined-ca-bundle\") pod \"ovn-controller-s44lp\" (UID: \"2a5ad51d-6996-42c0-b156-600ff9dc7782\") " pod="openstack/ovn-controller-s44lp" Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.880752 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/2a5ad51d-6996-42c0-b156-600ff9dc7782-var-log-ovn\") pod \"ovn-controller-s44lp\" (UID: \"2a5ad51d-6996-42c0-b156-600ff9dc7782\") " pod="openstack/ovn-controller-s44lp" Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.880777 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/2a5ad51d-6996-42c0-b156-600ff9dc7782-ovn-controller-tls-certs\") pod \"ovn-controller-s44lp\" (UID: \"2a5ad51d-6996-42c0-b156-600ff9dc7782\") " pod="openstack/ovn-controller-s44lp" Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.880801 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/2a5ad51d-6996-42c0-b156-600ff9dc7782-var-run-ovn\") pod \"ovn-controller-s44lp\" (UID: \"2a5ad51d-6996-42c0-b156-600ff9dc7782\") " pod="openstack/ovn-controller-s44lp" Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.880820 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/2a5ad51d-6996-42c0-b156-600ff9dc7782-var-run\") pod \"ovn-controller-s44lp\" (UID: \"2a5ad51d-6996-42c0-b156-600ff9dc7782\") " pod="openstack/ovn-controller-s44lp" Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.982240 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/d0abc322-ef0d-468b-9d23-4e2acd50b51a-var-lib\") pod \"ovn-controller-ovs-gpsrm\" (UID: \"d0abc322-ef0d-468b-9d23-4e2acd50b51a\") " pod="openstack/ovn-controller-ovs-gpsrm" Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 
18:56:08.982295 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/d0abc322-ef0d-468b-9d23-4e2acd50b51a-etc-ovs\") pod \"ovn-controller-ovs-gpsrm\" (UID: \"d0abc322-ef0d-468b-9d23-4e2acd50b51a\") " pod="openstack/ovn-controller-ovs-gpsrm"
Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.982322 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6tmmf\" (UniqueName: \"kubernetes.io/projected/2a5ad51d-6996-42c0-b156-600ff9dc7782-kube-api-access-6tmmf\") pod \"ovn-controller-s44lp\" (UID: \"2a5ad51d-6996-42c0-b156-600ff9dc7782\") " pod="openstack/ovn-controller-s44lp"
Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.982360 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2a5ad51d-6996-42c0-b156-600ff9dc7782-scripts\") pod \"ovn-controller-s44lp\" (UID: \"2a5ad51d-6996-42c0-b156-600ff9dc7782\") " pod="openstack/ovn-controller-s44lp"
Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.982392 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a5ad51d-6996-42c0-b156-600ff9dc7782-combined-ca-bundle\") pod \"ovn-controller-s44lp\" (UID: \"2a5ad51d-6996-42c0-b156-600ff9dc7782\") " pod="openstack/ovn-controller-s44lp"
Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.982428 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d0abc322-ef0d-468b-9d23-4e2acd50b51a-scripts\") pod \"ovn-controller-ovs-gpsrm\" (UID: \"d0abc322-ef0d-468b-9d23-4e2acd50b51a\") " pod="openstack/ovn-controller-ovs-gpsrm"
Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.982480 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d0abc322-ef0d-468b-9d23-4e2acd50b51a-var-run\") pod \"ovn-controller-ovs-gpsrm\" (UID: \"d0abc322-ef0d-468b-9d23-4e2acd50b51a\") " pod="openstack/ovn-controller-ovs-gpsrm"
Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.982591 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/2a5ad51d-6996-42c0-b156-600ff9dc7782-var-log-ovn\") pod \"ovn-controller-s44lp\" (UID: \"2a5ad51d-6996-42c0-b156-600ff9dc7782\") " pod="openstack/ovn-controller-s44lp"
Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.982618 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/d0abc322-ef0d-468b-9d23-4e2acd50b51a-var-log\") pod \"ovn-controller-ovs-gpsrm\" (UID: \"d0abc322-ef0d-468b-9d23-4e2acd50b51a\") " pod="openstack/ovn-controller-ovs-gpsrm"
Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.982659 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/2a5ad51d-6996-42c0-b156-600ff9dc7782-ovn-controller-tls-certs\") pod \"ovn-controller-s44lp\" (UID: \"2a5ad51d-6996-42c0-b156-600ff9dc7782\") " pod="openstack/ovn-controller-s44lp"
Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.982676 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z5ztv\" (UniqueName: \"kubernetes.io/projected/d0abc322-ef0d-468b-9d23-4e2acd50b51a-kube-api-access-z5ztv\") pod \"ovn-controller-ovs-gpsrm\" (UID: \"d0abc322-ef0d-468b-9d23-4e2acd50b51a\") " pod="openstack/ovn-controller-ovs-gpsrm"
Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.982874 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/2a5ad51d-6996-42c0-b156-600ff9dc7782-var-run-ovn\") pod \"ovn-controller-s44lp\" (UID: \"2a5ad51d-6996-42c0-b156-600ff9dc7782\") " pod="openstack/ovn-controller-s44lp"
Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.982927 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/2a5ad51d-6996-42c0-b156-600ff9dc7782-var-run\") pod \"ovn-controller-s44lp\" (UID: \"2a5ad51d-6996-42c0-b156-600ff9dc7782\") " pod="openstack/ovn-controller-s44lp"
Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.984501 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2a5ad51d-6996-42c0-b156-600ff9dc7782-scripts\") pod \"ovn-controller-s44lp\" (UID: \"2a5ad51d-6996-42c0-b156-600ff9dc7782\") " pod="openstack/ovn-controller-s44lp"
Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.987042 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/2a5ad51d-6996-42c0-b156-600ff9dc7782-ovn-controller-tls-certs\") pod \"ovn-controller-s44lp\" (UID: \"2a5ad51d-6996-42c0-b156-600ff9dc7782\") " pod="openstack/ovn-controller-s44lp"
Dec 02 18:56:08 crc kubenswrapper[4792]: I1202 18:56:08.987185 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a5ad51d-6996-42c0-b156-600ff9dc7782-combined-ca-bundle\") pod \"ovn-controller-s44lp\" (UID: \"2a5ad51d-6996-42c0-b156-600ff9dc7782\") " pod="openstack/ovn-controller-s44lp"
Dec 02 18:56:09 crc kubenswrapper[4792]: I1202 18:56:09.041855 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/2a5ad51d-6996-42c0-b156-600ff9dc7782-var-log-ovn\") pod \"ovn-controller-s44lp\" (UID: \"2a5ad51d-6996-42c0-b156-600ff9dc7782\") " pod="openstack/ovn-controller-s44lp"
Dec 02 18:56:09 crc kubenswrapper[4792]: I1202 18:56:09.042093 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/2a5ad51d-6996-42c0-b156-600ff9dc7782-var-run-ovn\") pod \"ovn-controller-s44lp\" (UID: \"2a5ad51d-6996-42c0-b156-600ff9dc7782\") " pod="openstack/ovn-controller-s44lp"
Dec 02 18:56:09 crc kubenswrapper[4792]: I1202 18:56:09.042125 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6tmmf\" (UniqueName: \"kubernetes.io/projected/2a5ad51d-6996-42c0-b156-600ff9dc7782-kube-api-access-6tmmf\") pod \"ovn-controller-s44lp\" (UID: \"2a5ad51d-6996-42c0-b156-600ff9dc7782\") " pod="openstack/ovn-controller-s44lp"
Dec 02 18:56:09 crc kubenswrapper[4792]: I1202 18:56:09.042210 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/2a5ad51d-6996-42c0-b156-600ff9dc7782-var-run\") pod \"ovn-controller-s44lp\" (UID: \"2a5ad51d-6996-42c0-b156-600ff9dc7782\") " pod="openstack/ovn-controller-s44lp"
Dec 02 18:56:09 crc kubenswrapper[4792]: I1202 18:56:09.084255 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/d0abc322-ef0d-468b-9d23-4e2acd50b51a-var-log\") pod \"ovn-controller-ovs-gpsrm\" (UID: \"d0abc322-ef0d-468b-9d23-4e2acd50b51a\") " pod="openstack/ovn-controller-ovs-gpsrm"
Dec 02 18:56:09 crc kubenswrapper[4792]: I1202 18:56:09.084325 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z5ztv\" (UniqueName: \"kubernetes.io/projected/d0abc322-ef0d-468b-9d23-4e2acd50b51a-kube-api-access-z5ztv\") pod \"ovn-controller-ovs-gpsrm\" (UID: \"d0abc322-ef0d-468b-9d23-4e2acd50b51a\") " pod="openstack/ovn-controller-ovs-gpsrm"
Dec 02 18:56:09 crc kubenswrapper[4792]: I1202 18:56:09.084387 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/d0abc322-ef0d-468b-9d23-4e2acd50b51a-var-lib\") pod \"ovn-controller-ovs-gpsrm\" (UID: \"d0abc322-ef0d-468b-9d23-4e2acd50b51a\") " pod="openstack/ovn-controller-ovs-gpsrm"
Dec 02 18:56:09 crc kubenswrapper[4792]: I1202 18:56:09.084409 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/d0abc322-ef0d-468b-9d23-4e2acd50b51a-etc-ovs\") pod \"ovn-controller-ovs-gpsrm\" (UID: \"d0abc322-ef0d-468b-9d23-4e2acd50b51a\") " pod="openstack/ovn-controller-ovs-gpsrm"
Dec 02 18:56:09 crc kubenswrapper[4792]: I1202 18:56:09.084471 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d0abc322-ef0d-468b-9d23-4e2acd50b51a-scripts\") pod \"ovn-controller-ovs-gpsrm\" (UID: \"d0abc322-ef0d-468b-9d23-4e2acd50b51a\") " pod="openstack/ovn-controller-ovs-gpsrm"
Dec 02 18:56:09 crc kubenswrapper[4792]: I1202 18:56:09.084496 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d0abc322-ef0d-468b-9d23-4e2acd50b51a-var-run\") pod \"ovn-controller-ovs-gpsrm\" (UID: \"d0abc322-ef0d-468b-9d23-4e2acd50b51a\") " pod="openstack/ovn-controller-ovs-gpsrm"
Dec 02 18:56:09 crc kubenswrapper[4792]: I1202 18:56:09.084548 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/d0abc322-ef0d-468b-9d23-4e2acd50b51a-var-log\") pod \"ovn-controller-ovs-gpsrm\" (UID: \"d0abc322-ef0d-468b-9d23-4e2acd50b51a\") " pod="openstack/ovn-controller-ovs-gpsrm"
Dec 02 18:56:09 crc kubenswrapper[4792]: I1202 18:56:09.084631 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d0abc322-ef0d-468b-9d23-4e2acd50b51a-var-run\") pod \"ovn-controller-ovs-gpsrm\" (UID: \"d0abc322-ef0d-468b-9d23-4e2acd50b51a\") " pod="openstack/ovn-controller-ovs-gpsrm"
Dec 02 18:56:09 crc kubenswrapper[4792]: I1202 18:56:09.084776 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/d0abc322-ef0d-468b-9d23-4e2acd50b51a-etc-ovs\") pod \"ovn-controller-ovs-gpsrm\" (UID: \"d0abc322-ef0d-468b-9d23-4e2acd50b51a\") " pod="openstack/ovn-controller-ovs-gpsrm"
Dec 02 18:56:09 crc kubenswrapper[4792]: I1202 18:56:09.084992 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/d0abc322-ef0d-468b-9d23-4e2acd50b51a-var-lib\") pod \"ovn-controller-ovs-gpsrm\"
(UID: \"d0abc322-ef0d-468b-9d23-4e2acd50b51a\") " pod="openstack/ovn-controller-ovs-gpsrm" Dec 02 18:56:09 crc kubenswrapper[4792]: I1202 18:56:09.087358 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d0abc322-ef0d-468b-9d23-4e2acd50b51a-scripts\") pod \"ovn-controller-ovs-gpsrm\" (UID: \"d0abc322-ef0d-468b-9d23-4e2acd50b51a\") " pod="openstack/ovn-controller-ovs-gpsrm" Dec 02 18:56:09 crc kubenswrapper[4792]: I1202 18:56:09.105141 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z5ztv\" (UniqueName: \"kubernetes.io/projected/d0abc322-ef0d-468b-9d23-4e2acd50b51a-kube-api-access-z5ztv\") pod \"ovn-controller-ovs-gpsrm\" (UID: \"d0abc322-ef0d-468b-9d23-4e2acd50b51a\") " pod="openstack/ovn-controller-ovs-gpsrm" Dec 02 18:56:09 crc kubenswrapper[4792]: I1202 18:56:09.139657 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-s44lp" Dec 02 18:56:09 crc kubenswrapper[4792]: I1202 18:56:09.179898 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-gpsrm" Dec 02 18:56:12 crc kubenswrapper[4792]: I1202 18:56:12.697747 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 02 18:56:12 crc kubenswrapper[4792]: I1202 18:56:12.699642 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 02 18:56:12 crc kubenswrapper[4792]: I1202 18:56:12.705120 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Dec 02 18:56:12 crc kubenswrapper[4792]: I1202 18:56:12.705858 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-2hvwt" Dec 02 18:56:12 crc kubenswrapper[4792]: I1202 18:56:12.718275 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 02 18:56:12 crc kubenswrapper[4792]: I1202 18:56:12.750799 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Dec 02 18:56:12 crc kubenswrapper[4792]: I1202 18:56:12.750864 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Dec 02 18:56:12 crc kubenswrapper[4792]: I1202 18:56:12.847748 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6cfe9a05-cb43-47d3-84f8-95642cd098ec-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"6cfe9a05-cb43-47d3-84f8-95642cd098ec\") " pod="openstack/ovsdbserver-sb-0" Dec 02 18:56:12 crc kubenswrapper[4792]: I1202 18:56:12.847999 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6cfe9a05-cb43-47d3-84f8-95642cd098ec-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"6cfe9a05-cb43-47d3-84f8-95642cd098ec\") " pod="openstack/ovsdbserver-sb-0" Dec 02 18:56:12 crc kubenswrapper[4792]: I1202 18:56:12.848030 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-437117e3-70ba-46f6-98b0-bed2416ddab7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-437117e3-70ba-46f6-98b0-bed2416ddab7\") pod \"ovsdbserver-sb-0\" (UID: \"6cfe9a05-cb43-47d3-84f8-95642cd098ec\") " 
pod="openstack/ovsdbserver-sb-0" Dec 02 18:56:12 crc kubenswrapper[4792]: I1202 18:56:12.848052 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qg4fx\" (UniqueName: \"kubernetes.io/projected/6cfe9a05-cb43-47d3-84f8-95642cd098ec-kube-api-access-qg4fx\") pod \"ovsdbserver-sb-0\" (UID: \"6cfe9a05-cb43-47d3-84f8-95642cd098ec\") " pod="openstack/ovsdbserver-sb-0" Dec 02 18:56:12 crc kubenswrapper[4792]: I1202 18:56:12.848074 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6cfe9a05-cb43-47d3-84f8-95642cd098ec-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"6cfe9a05-cb43-47d3-84f8-95642cd098ec\") " pod="openstack/ovsdbserver-sb-0" Dec 02 18:56:12 crc kubenswrapper[4792]: I1202 18:56:12.848093 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6cfe9a05-cb43-47d3-84f8-95642cd098ec-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"6cfe9a05-cb43-47d3-84f8-95642cd098ec\") " pod="openstack/ovsdbserver-sb-0" Dec 02 18:56:12 crc kubenswrapper[4792]: I1202 18:56:12.848158 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6cfe9a05-cb43-47d3-84f8-95642cd098ec-config\") pod \"ovsdbserver-sb-0\" (UID: \"6cfe9a05-cb43-47d3-84f8-95642cd098ec\") " pod="openstack/ovsdbserver-sb-0" Dec 02 18:56:12 crc kubenswrapper[4792]: I1202 18:56:12.848206 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6cfe9a05-cb43-47d3-84f8-95642cd098ec-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"6cfe9a05-cb43-47d3-84f8-95642cd098ec\") " pod="openstack/ovsdbserver-sb-0" Dec 02 18:56:12 crc kubenswrapper[4792]: I1202 18:56:12.949451 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6cfe9a05-cb43-47d3-84f8-95642cd098ec-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"6cfe9a05-cb43-47d3-84f8-95642cd098ec\") " pod="openstack/ovsdbserver-sb-0" Dec 02 18:56:12 crc kubenswrapper[4792]: I1202 18:56:12.949508 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-437117e3-70ba-46f6-98b0-bed2416ddab7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-437117e3-70ba-46f6-98b0-bed2416ddab7\") pod \"ovsdbserver-sb-0\" (UID: \"6cfe9a05-cb43-47d3-84f8-95642cd098ec\") " pod="openstack/ovsdbserver-sb-0" Dec 02 18:56:12 crc kubenswrapper[4792]: I1202 18:56:12.949543 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qg4fx\" (UniqueName: \"kubernetes.io/projected/6cfe9a05-cb43-47d3-84f8-95642cd098ec-kube-api-access-qg4fx\") pod \"ovsdbserver-sb-0\" (UID: \"6cfe9a05-cb43-47d3-84f8-95642cd098ec\") " pod="openstack/ovsdbserver-sb-0" Dec 02 18:56:12 crc kubenswrapper[4792]: I1202 18:56:12.949565 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6cfe9a05-cb43-47d3-84f8-95642cd098ec-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"6cfe9a05-cb43-47d3-84f8-95642cd098ec\") " pod="openstack/ovsdbserver-sb-0" Dec 02 18:56:12 crc kubenswrapper[4792]: I1202 18:56:12.949584 4792 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6cfe9a05-cb43-47d3-84f8-95642cd098ec-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"6cfe9a05-cb43-47d3-84f8-95642cd098ec\") " pod="openstack/ovsdbserver-sb-0" Dec 02 18:56:12 crc kubenswrapper[4792]: I1202 18:56:12.949653 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6cfe9a05-cb43-47d3-84f8-95642cd098ec-config\") pod \"ovsdbserver-sb-0\" (UID: \"6cfe9a05-cb43-47d3-84f8-95642cd098ec\") " pod="openstack/ovsdbserver-sb-0" Dec 02 18:56:12 crc kubenswrapper[4792]: I1202 18:56:12.949773 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6cfe9a05-cb43-47d3-84f8-95642cd098ec-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"6cfe9a05-cb43-47d3-84f8-95642cd098ec\") " pod="openstack/ovsdbserver-sb-0" Dec 02 18:56:12 crc kubenswrapper[4792]: I1202 18:56:12.949816 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6cfe9a05-cb43-47d3-84f8-95642cd098ec-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"6cfe9a05-cb43-47d3-84f8-95642cd098ec\") " pod="openstack/ovsdbserver-sb-0" Dec 02 18:56:12 crc kubenswrapper[4792]: I1202 18:56:12.951261 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6cfe9a05-cb43-47d3-84f8-95642cd098ec-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"6cfe9a05-cb43-47d3-84f8-95642cd098ec\") " pod="openstack/ovsdbserver-sb-0" Dec 02 18:56:12 crc kubenswrapper[4792]: I1202 18:56:12.952138 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6cfe9a05-cb43-47d3-84f8-95642cd098ec-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"6cfe9a05-cb43-47d3-84f8-95642cd098ec\") " pod="openstack/ovsdbserver-sb-0" Dec 02 18:56:12 crc kubenswrapper[4792]: I1202 18:56:12.952308 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6cfe9a05-cb43-47d3-84f8-95642cd098ec-config\") pod \"ovsdbserver-sb-0\" (UID: \"6cfe9a05-cb43-47d3-84f8-95642cd098ec\") " pod="openstack/ovsdbserver-sb-0" Dec 02 18:56:12 crc kubenswrapper[4792]: I1202 18:56:12.955409 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6cfe9a05-cb43-47d3-84f8-95642cd098ec-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"6cfe9a05-cb43-47d3-84f8-95642cd098ec\") " pod="openstack/ovsdbserver-sb-0" Dec 02 18:56:12 crc kubenswrapper[4792]: I1202 18:56:12.955909 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6cfe9a05-cb43-47d3-84f8-95642cd098ec-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"6cfe9a05-cb43-47d3-84f8-95642cd098ec\") " pod="openstack/ovsdbserver-sb-0" Dec 02 18:56:12 crc kubenswrapper[4792]: I1202 18:56:12.956806 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6cfe9a05-cb43-47d3-84f8-95642cd098ec-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"6cfe9a05-cb43-47d3-84f8-95642cd098ec\") " pod="openstack/ovsdbserver-sb-0" Dec 02 18:56:12 crc 
kubenswrapper[4792]: I1202 18:56:12.957886 4792 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Dec 02 18:56:12 crc kubenswrapper[4792]: I1202 18:56:12.958019 4792 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-437117e3-70ba-46f6-98b0-bed2416ddab7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-437117e3-70ba-46f6-98b0-bed2416ddab7\") pod \"ovsdbserver-sb-0\" (UID: \"6cfe9a05-cb43-47d3-84f8-95642cd098ec\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/6f9856a307bac08cb4a875bf6860f3755d67e7855b76843906497aa548294c49/globalmount\"" pod="openstack/ovsdbserver-sb-0"
Dec 02 18:56:12 crc kubenswrapper[4792]: I1202 18:56:12.968412 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qg4fx\" (UniqueName: \"kubernetes.io/projected/6cfe9a05-cb43-47d3-84f8-95642cd098ec-kube-api-access-qg4fx\") pod \"ovsdbserver-sb-0\" (UID: \"6cfe9a05-cb43-47d3-84f8-95642cd098ec\") " pod="openstack/ovsdbserver-sb-0"
Dec 02 18:56:12 crc kubenswrapper[4792]: I1202 18:56:12.984632 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-437117e3-70ba-46f6-98b0-bed2416ddab7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-437117e3-70ba-46f6-98b0-bed2416ddab7\") pod \"ovsdbserver-sb-0\" (UID: \"6cfe9a05-cb43-47d3-84f8-95642cd098ec\") " pod="openstack/ovsdbserver-sb-0"
Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.070626 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0"
Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.371737 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-distributor-56cd74f89f-lgcfh"]
Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.378981 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-lgcfh"
Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.381361 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-distributor-http"
Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.382082 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-dockercfg-6zv5t"
Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.382195 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"cloudkitty-lokistack-config"
Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.383753 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-distributor-grpc"
Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.384163 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"cloudkitty-lokistack-ca-bundle"
Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.396311 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-distributor-56cd74f89f-lgcfh"]
Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.517878 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-querier-548665d79b-vwc9f"]
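The csi_attacher.go:380 record above explains why the PVC mount for ovsdbserver-sb-0 looks different from the secret and configmap mounts: the kubevirt.io.hostpath-provisioner node plugin does not advertise the CSI STAGE_UNSTAGE_VOLUME capability, so the kubelet skips NodeStageVolume, immediately reports MountVolume.MountDevice as succeeded at the globalmount path, and does the real work in MountVolume.SetUp (NodePublishVolume). A minimal Go sketch of that capability check, mirroring the kubelet's behavior in spirit (stageCapable is an illustrative name, not kubelet source):

package main

import (
	"fmt"

	"github.com/container-storage-interface/spec/lib/go/csi"
)

// stageCapable mirrors, in spirit, the check behind "STAGE_UNSTAGE_VOLUME
// capability not set. Skipping MountDevice...": staging (MountDevice /
// NodeStageVolume) only runs when the node plugin advertises the
// capability in its NodeGetCapabilities response.
func stageCapable(caps []*csi.NodeServiceCapability) bool {
	for _, c := range caps {
		if c.GetRpc().GetType() == csi.NodeServiceCapability_RPC_STAGE_UNSTAGE_VOLUME {
			return true
		}
	}
	return false
}

func main() {
	// The hostpath provisioner in this log reports no staging capability,
	// so the volume goes straight to the per-pod SetUp step.
	var caps []*csi.NodeServiceCapability
	fmt.Println("run NodeStageVolume:", stageCapable(caps)) // false
}
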
Need to start a new one" pod="openstack/cloudkitty-lokistack-querier-548665d79b-vwc9f" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.522847 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-loki-s3" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.522909 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-querier-grpc" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.525329 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-querier-http" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.530452 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-querier-548665d79b-vwc9f"] Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.566861 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c11c6af2-cd99-41e8-b6cf-b86ab025bbfa-config\") pod \"cloudkitty-lokistack-distributor-56cd74f89f-lgcfh\" (UID: \"c11c6af2-cd99-41e8-b6cf-b86ab025bbfa\") " pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-lgcfh" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.566940 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-distributor-http\" (UniqueName: \"kubernetes.io/secret/c11c6af2-cd99-41e8-b6cf-b86ab025bbfa-cloudkitty-lokistack-distributor-http\") pod \"cloudkitty-lokistack-distributor-56cd74f89f-lgcfh\" (UID: \"c11c6af2-cd99-41e8-b6cf-b86ab025bbfa\") " pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-lgcfh" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.566999 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmsjr\" (UniqueName: \"kubernetes.io/projected/c11c6af2-cd99-41e8-b6cf-b86ab025bbfa-kube-api-access-nmsjr\") pod \"cloudkitty-lokistack-distributor-56cd74f89f-lgcfh\" (UID: \"c11c6af2-cd99-41e8-b6cf-b86ab025bbfa\") " pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-lgcfh" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.567980 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-distributor-grpc\" (UniqueName: \"kubernetes.io/secret/c11c6af2-cd99-41e8-b6cf-b86ab025bbfa-cloudkitty-lokistack-distributor-grpc\") pod \"cloudkitty-lokistack-distributor-56cd74f89f-lgcfh\" (UID: \"c11c6af2-cd99-41e8-b6cf-b86ab025bbfa\") " pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-lgcfh" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.568024 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c11c6af2-cd99-41e8-b6cf-b86ab025bbfa-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-distributor-56cd74f89f-lgcfh\" (UID: \"c11c6af2-cd99-41e8-b6cf-b86ab025bbfa\") " pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-lgcfh" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.615631 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-query-frontend-779849886d-n28fq"] Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.616668 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-n28fq" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.619800 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-query-frontend-http" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.620032 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-query-frontend-grpc" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.627094 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-query-frontend-779849886d-n28fq"] Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.669501 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmqgp\" (UniqueName: \"kubernetes.io/projected/376a394c-12c9-4fa9-b24a-841a6b05ba0b-kube-api-access-wmqgp\") pod \"cloudkitty-lokistack-querier-548665d79b-vwc9f\" (UID: \"376a394c-12c9-4fa9-b24a-841a6b05ba0b\") " pod="openstack/cloudkitty-lokistack-querier-548665d79b-vwc9f" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.669870 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/376a394c-12c9-4fa9-b24a-841a6b05ba0b-config\") pod \"cloudkitty-lokistack-querier-548665d79b-vwc9f\" (UID: \"376a394c-12c9-4fa9-b24a-841a6b05ba0b\") " pod="openstack/cloudkitty-lokistack-querier-548665d79b-vwc9f" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.669936 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-distributor-http\" (UniqueName: \"kubernetes.io/secret/c11c6af2-cd99-41e8-b6cf-b86ab025bbfa-cloudkitty-lokistack-distributor-http\") pod \"cloudkitty-lokistack-distributor-56cd74f89f-lgcfh\" (UID: \"c11c6af2-cd99-41e8-b6cf-b86ab025bbfa\") " pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-lgcfh" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.669993 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/376a394c-12c9-4fa9-b24a-841a6b05ba0b-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-querier-548665d79b-vwc9f\" (UID: \"376a394c-12c9-4fa9-b24a-841a6b05ba0b\") " pod="openstack/cloudkitty-lokistack-querier-548665d79b-vwc9f" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.670014 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmsjr\" (UniqueName: \"kubernetes.io/projected/c11c6af2-cd99-41e8-b6cf-b86ab025bbfa-kube-api-access-nmsjr\") pod \"cloudkitty-lokistack-distributor-56cd74f89f-lgcfh\" (UID: \"c11c6af2-cd99-41e8-b6cf-b86ab025bbfa\") " pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-lgcfh" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.670054 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-querier-http\" (UniqueName: \"kubernetes.io/secret/376a394c-12c9-4fa9-b24a-841a6b05ba0b-cloudkitty-lokistack-querier-http\") pod \"cloudkitty-lokistack-querier-548665d79b-vwc9f\" (UID: \"376a394c-12c9-4fa9-b24a-841a6b05ba0b\") " pod="openstack/cloudkitty-lokistack-querier-548665d79b-vwc9f" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.670072 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"cloudkitty-lokistack-querier-grpc\" (UniqueName: \"kubernetes.io/secret/376a394c-12c9-4fa9-b24a-841a6b05ba0b-cloudkitty-lokistack-querier-grpc\") pod \"cloudkitty-lokistack-querier-548665d79b-vwc9f\" (UID: \"376a394c-12c9-4fa9-b24a-841a6b05ba0b\") " pod="openstack/cloudkitty-lokistack-querier-548665d79b-vwc9f" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.670106 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-distributor-grpc\" (UniqueName: \"kubernetes.io/secret/c11c6af2-cd99-41e8-b6cf-b86ab025bbfa-cloudkitty-lokistack-distributor-grpc\") pod \"cloudkitty-lokistack-distributor-56cd74f89f-lgcfh\" (UID: \"c11c6af2-cd99-41e8-b6cf-b86ab025bbfa\") " pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-lgcfh" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.670137 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c11c6af2-cd99-41e8-b6cf-b86ab025bbfa-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-distributor-56cd74f89f-lgcfh\" (UID: \"c11c6af2-cd99-41e8-b6cf-b86ab025bbfa\") " pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-lgcfh" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.670157 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/376a394c-12c9-4fa9-b24a-841a6b05ba0b-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-querier-548665d79b-vwc9f\" (UID: \"376a394c-12c9-4fa9-b24a-841a6b05ba0b\") " pod="openstack/cloudkitty-lokistack-querier-548665d79b-vwc9f" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.670182 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c11c6af2-cd99-41e8-b6cf-b86ab025bbfa-config\") pod \"cloudkitty-lokistack-distributor-56cd74f89f-lgcfh\" (UID: \"c11c6af2-cd99-41e8-b6cf-b86ab025bbfa\") " pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-lgcfh" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.671657 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c11c6af2-cd99-41e8-b6cf-b86ab025bbfa-config\") pod \"cloudkitty-lokistack-distributor-56cd74f89f-lgcfh\" (UID: \"c11c6af2-cd99-41e8-b6cf-b86ab025bbfa\") " pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-lgcfh" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.671728 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c11c6af2-cd99-41e8-b6cf-b86ab025bbfa-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-distributor-56cd74f89f-lgcfh\" (UID: \"c11c6af2-cd99-41e8-b6cf-b86ab025bbfa\") " pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-lgcfh" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.677160 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-distributor-http\" (UniqueName: \"kubernetes.io/secret/c11c6af2-cd99-41e8-b6cf-b86ab025bbfa-cloudkitty-lokistack-distributor-http\") pod \"cloudkitty-lokistack-distributor-56cd74f89f-lgcfh\" (UID: \"c11c6af2-cd99-41e8-b6cf-b86ab025bbfa\") " pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-lgcfh" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.679451 4792 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-distributor-grpc\" (UniqueName: \"kubernetes.io/secret/c11c6af2-cd99-41e8-b6cf-b86ab025bbfa-cloudkitty-lokistack-distributor-grpc\") pod \"cloudkitty-lokistack-distributor-56cd74f89f-lgcfh\" (UID: \"c11c6af2-cd99-41e8-b6cf-b86ab025bbfa\") " pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-lgcfh" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.698351 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmsjr\" (UniqueName: \"kubernetes.io/projected/c11c6af2-cd99-41e8-b6cf-b86ab025bbfa-kube-api-access-nmsjr\") pod \"cloudkitty-lokistack-distributor-56cd74f89f-lgcfh\" (UID: \"c11c6af2-cd99-41e8-b6cf-b86ab025bbfa\") " pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-lgcfh" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.700253 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-lgcfh" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.748697 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-gateway-76cc998948-wz9ls"] Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.750146 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wz9ls" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.753179 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-gateway-76cc998948-wsd5z"] Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.754496 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wsd5z" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.757631 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-gateway-http" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.757934 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-gateway-dockercfg-l64bj" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.758109 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"cloudkitty-lokistack-gateway" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.758271 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-gateway" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.758392 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-gateway-client-http" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.758534 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"cloudkitty-lokistack-gateway-ca-bundle" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.765158 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"cloudkitty-lokistack-ca" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.771217 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-query-frontend-grpc\" (UniqueName: \"kubernetes.io/secret/29663f5c-6fe7-42d5-8d53-c8d900e36a9c-cloudkitty-lokistack-query-frontend-grpc\") pod \"cloudkitty-lokistack-query-frontend-779849886d-n28fq\" (UID: \"29663f5c-6fe7-42d5-8d53-c8d900e36a9c\") " 
pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-n28fq" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.771284 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmqgp\" (UniqueName: \"kubernetes.io/projected/376a394c-12c9-4fa9-b24a-841a6b05ba0b-kube-api-access-wmqgp\") pod \"cloudkitty-lokistack-querier-548665d79b-vwc9f\" (UID: \"376a394c-12c9-4fa9-b24a-841a6b05ba0b\") " pod="openstack/cloudkitty-lokistack-querier-548665d79b-vwc9f" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.771309 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/376a394c-12c9-4fa9-b24a-841a6b05ba0b-config\") pod \"cloudkitty-lokistack-querier-548665d79b-vwc9f\" (UID: \"376a394c-12c9-4fa9-b24a-841a6b05ba0b\") " pod="openstack/cloudkitty-lokistack-querier-548665d79b-vwc9f" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.771330 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-query-frontend-http\" (UniqueName: \"kubernetes.io/secret/29663f5c-6fe7-42d5-8d53-c8d900e36a9c-cloudkitty-lokistack-query-frontend-http\") pod \"cloudkitty-lokistack-query-frontend-779849886d-n28fq\" (UID: \"29663f5c-6fe7-42d5-8d53-c8d900e36a9c\") " pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-n28fq" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.771392 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/376a394c-12c9-4fa9-b24a-841a6b05ba0b-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-querier-548665d79b-vwc9f\" (UID: \"376a394c-12c9-4fa9-b24a-841a6b05ba0b\") " pod="openstack/cloudkitty-lokistack-querier-548665d79b-vwc9f" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.771421 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5rhp6\" (UniqueName: \"kubernetes.io/projected/29663f5c-6fe7-42d5-8d53-c8d900e36a9c-kube-api-access-5rhp6\") pod \"cloudkitty-lokistack-query-frontend-779849886d-n28fq\" (UID: \"29663f5c-6fe7-42d5-8d53-c8d900e36a9c\") " pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-n28fq" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.771444 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-querier-http\" (UniqueName: \"kubernetes.io/secret/376a394c-12c9-4fa9-b24a-841a6b05ba0b-cloudkitty-lokistack-querier-http\") pod \"cloudkitty-lokistack-querier-548665d79b-vwc9f\" (UID: \"376a394c-12c9-4fa9-b24a-841a6b05ba0b\") " pod="openstack/cloudkitty-lokistack-querier-548665d79b-vwc9f" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.771462 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/29663f5c-6fe7-42d5-8d53-c8d900e36a9c-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-query-frontend-779849886d-n28fq\" (UID: \"29663f5c-6fe7-42d5-8d53-c8d900e36a9c\") " pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-n28fq" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.771481 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-querier-grpc\" (UniqueName: \"kubernetes.io/secret/376a394c-12c9-4fa9-b24a-841a6b05ba0b-cloudkitty-lokistack-querier-grpc\") 
pod \"cloudkitty-lokistack-querier-548665d79b-vwc9f\" (UID: \"376a394c-12c9-4fa9-b24a-841a6b05ba0b\") " pod="openstack/cloudkitty-lokistack-querier-548665d79b-vwc9f" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.771504 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29663f5c-6fe7-42d5-8d53-c8d900e36a9c-config\") pod \"cloudkitty-lokistack-query-frontend-779849886d-n28fq\" (UID: \"29663f5c-6fe7-42d5-8d53-c8d900e36a9c\") " pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-n28fq" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.771561 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/376a394c-12c9-4fa9-b24a-841a6b05ba0b-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-querier-548665d79b-vwc9f\" (UID: \"376a394c-12c9-4fa9-b24a-841a6b05ba0b\") " pod="openstack/cloudkitty-lokistack-querier-548665d79b-vwc9f" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.773872 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/376a394c-12c9-4fa9-b24a-841a6b05ba0b-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-querier-548665d79b-vwc9f\" (UID: \"376a394c-12c9-4fa9-b24a-841a6b05ba0b\") " pod="openstack/cloudkitty-lokistack-querier-548665d79b-vwc9f" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.775386 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/376a394c-12c9-4fa9-b24a-841a6b05ba0b-config\") pod \"cloudkitty-lokistack-querier-548665d79b-vwc9f\" (UID: \"376a394c-12c9-4fa9-b24a-841a6b05ba0b\") " pod="openstack/cloudkitty-lokistack-querier-548665d79b-vwc9f" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.782951 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-querier-grpc\" (UniqueName: \"kubernetes.io/secret/376a394c-12c9-4fa9-b24a-841a6b05ba0b-cloudkitty-lokistack-querier-grpc\") pod \"cloudkitty-lokistack-querier-548665d79b-vwc9f\" (UID: \"376a394c-12c9-4fa9-b24a-841a6b05ba0b\") " pod="openstack/cloudkitty-lokistack-querier-548665d79b-vwc9f" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.784697 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-querier-http\" (UniqueName: \"kubernetes.io/secret/376a394c-12c9-4fa9-b24a-841a6b05ba0b-cloudkitty-lokistack-querier-http\") pod \"cloudkitty-lokistack-querier-548665d79b-vwc9f\" (UID: \"376a394c-12c9-4fa9-b24a-841a6b05ba0b\") " pod="openstack/cloudkitty-lokistack-querier-548665d79b-vwc9f" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.784909 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/376a394c-12c9-4fa9-b24a-841a6b05ba0b-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-querier-548665d79b-vwc9f\" (UID: \"376a394c-12c9-4fa9-b24a-841a6b05ba0b\") " pod="openstack/cloudkitty-lokistack-querier-548665d79b-vwc9f" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.788206 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-gateway-76cc998948-wz9ls"] Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.808358 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-wmqgp\" (UniqueName: \"kubernetes.io/projected/376a394c-12c9-4fa9-b24a-841a6b05ba0b-kube-api-access-wmqgp\") pod \"cloudkitty-lokistack-querier-548665d79b-vwc9f\" (UID: \"376a394c-12c9-4fa9-b24a-841a6b05ba0b\") " pod="openstack/cloudkitty-lokistack-querier-548665d79b-vwc9f" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.816659 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-gateway-76cc998948-wsd5z"] Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.847674 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-querier-548665d79b-vwc9f" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.873368 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/2f112377-5fcb-424f-9fa1-f92ab0608d82-tls-secret\") pod \"cloudkitty-lokistack-gateway-76cc998948-wsd5z\" (UID: \"2f112377-5fcb-424f-9fa1-f92ab0608d82\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wsd5z" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.873413 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-query-frontend-grpc\" (UniqueName: \"kubernetes.io/secret/29663f5c-6fe7-42d5-8d53-c8d900e36a9c-cloudkitty-lokistack-query-frontend-grpc\") pod \"cloudkitty-lokistack-query-frontend-779849886d-n28fq\" (UID: \"29663f5c-6fe7-42d5-8d53-c8d900e36a9c\") " pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-n28fq" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.873435 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-76cc998948-wz9ls\" (UID: \"a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wz9ls" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.873478 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/2f112377-5fcb-424f-9fa1-f92ab0608d82-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-76cc998948-wsd5z\" (UID: \"2f112377-5fcb-424f-9fa1-f92ab0608d82\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wsd5z" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.873495 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4-tenants\") pod \"cloudkitty-lokistack-gateway-76cc998948-wz9ls\" (UID: \"a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wz9ls" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.873534 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-query-frontend-http\" (UniqueName: \"kubernetes.io/secret/29663f5c-6fe7-42d5-8d53-c8d900e36a9c-cloudkitty-lokistack-query-frontend-http\") pod \"cloudkitty-lokistack-query-frontend-779849886d-n28fq\" (UID: \"29663f5c-6fe7-42d5-8d53-c8d900e36a9c\") " pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-n28fq" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.873562 4792 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4-rbac\") pod \"cloudkitty-lokistack-gateway-76cc998948-wz9ls\" (UID: \"a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wz9ls" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.873586 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2f112377-5fcb-424f-9fa1-f92ab0608d82-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-76cc998948-wsd5z\" (UID: \"2f112377-5fcb-424f-9fa1-f92ab0608d82\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wsd5z" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.873608 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-76cc998948-wz9ls\" (UID: \"a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wz9ls" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.873634 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/2f112377-5fcb-424f-9fa1-f92ab0608d82-tenants\") pod \"cloudkitty-lokistack-gateway-76cc998948-wsd5z\" (UID: \"2f112377-5fcb-424f-9fa1-f92ab0608d82\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wsd5z" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.873659 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2f112377-5fcb-424f-9fa1-f92ab0608d82-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-76cc998948-wsd5z\" (UID: \"2f112377-5fcb-424f-9fa1-f92ab0608d82\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wsd5z" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.873676 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4-tls-secret\") pod \"cloudkitty-lokistack-gateway-76cc998948-wz9ls\" (UID: \"a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wz9ls" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.873698 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-76cc998948-wz9ls\" (UID: \"a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wz9ls" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.873714 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2f112377-5fcb-424f-9fa1-f92ab0608d82-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-76cc998948-wsd5z\" (UID: \"2f112377-5fcb-424f-9fa1-f92ab0608d82\") " 
pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wsd5z" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.873734 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kpd5p\" (UniqueName: \"kubernetes.io/projected/a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4-kube-api-access-kpd5p\") pod \"cloudkitty-lokistack-gateway-76cc998948-wz9ls\" (UID: \"a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wz9ls" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.873752 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5rhp6\" (UniqueName: \"kubernetes.io/projected/29663f5c-6fe7-42d5-8d53-c8d900e36a9c-kube-api-access-5rhp6\") pod \"cloudkitty-lokistack-query-frontend-779849886d-n28fq\" (UID: \"29663f5c-6fe7-42d5-8d53-c8d900e36a9c\") " pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-n28fq" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.873787 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-76cc998948-wz9ls\" (UID: \"a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wz9ls" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.873807 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/2f112377-5fcb-424f-9fa1-f92ab0608d82-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-76cc998948-wsd5z\" (UID: \"2f112377-5fcb-424f-9fa1-f92ab0608d82\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wsd5z" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.873833 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/29663f5c-6fe7-42d5-8d53-c8d900e36a9c-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-query-frontend-779849886d-n28fq\" (UID: \"29663f5c-6fe7-42d5-8d53-c8d900e36a9c\") " pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-n28fq" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.873857 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/2f112377-5fcb-424f-9fa1-f92ab0608d82-rbac\") pod \"cloudkitty-lokistack-gateway-76cc998948-wsd5z\" (UID: \"2f112377-5fcb-424f-9fa1-f92ab0608d82\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wsd5z" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.873883 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29663f5c-6fe7-42d5-8d53-c8d900e36a9c-config\") pod \"cloudkitty-lokistack-query-frontend-779849886d-n28fq\" (UID: \"29663f5c-6fe7-42d5-8d53-c8d900e36a9c\") " pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-n28fq" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.873916 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-czx8s\" (UniqueName: \"kubernetes.io/projected/2f112377-5fcb-424f-9fa1-f92ab0608d82-kube-api-access-czx8s\") pod 
\"cloudkitty-lokistack-gateway-76cc998948-wsd5z\" (UID: \"2f112377-5fcb-424f-9fa1-f92ab0608d82\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wsd5z" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.873938 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-76cc998948-wz9ls\" (UID: \"a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wz9ls" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.875964 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/29663f5c-6fe7-42d5-8d53-c8d900e36a9c-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-query-frontend-779849886d-n28fq\" (UID: \"29663f5c-6fe7-42d5-8d53-c8d900e36a9c\") " pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-n28fq" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.876035 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29663f5c-6fe7-42d5-8d53-c8d900e36a9c-config\") pod \"cloudkitty-lokistack-query-frontend-779849886d-n28fq\" (UID: \"29663f5c-6fe7-42d5-8d53-c8d900e36a9c\") " pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-n28fq" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.877040 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-query-frontend-http\" (UniqueName: \"kubernetes.io/secret/29663f5c-6fe7-42d5-8d53-c8d900e36a9c-cloudkitty-lokistack-query-frontend-http\") pod \"cloudkitty-lokistack-query-frontend-779849886d-n28fq\" (UID: \"29663f5c-6fe7-42d5-8d53-c8d900e36a9c\") " pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-n28fq" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.879953 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-query-frontend-grpc\" (UniqueName: \"kubernetes.io/secret/29663f5c-6fe7-42d5-8d53-c8d900e36a9c-cloudkitty-lokistack-query-frontend-grpc\") pod \"cloudkitty-lokistack-query-frontend-779849886d-n28fq\" (UID: \"29663f5c-6fe7-42d5-8d53-c8d900e36a9c\") " pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-n28fq" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.896732 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5rhp6\" (UniqueName: \"kubernetes.io/projected/29663f5c-6fe7-42d5-8d53-c8d900e36a9c-kube-api-access-5rhp6\") pod \"cloudkitty-lokistack-query-frontend-779849886d-n28fq\" (UID: \"29663f5c-6fe7-42d5-8d53-c8d900e36a9c\") " pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-n28fq" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.944357 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-n28fq" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.974971 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-czx8s\" (UniqueName: \"kubernetes.io/projected/2f112377-5fcb-424f-9fa1-f92ab0608d82-kube-api-access-czx8s\") pod \"cloudkitty-lokistack-gateway-76cc998948-wsd5z\" (UID: \"2f112377-5fcb-424f-9fa1-f92ab0608d82\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wsd5z" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.975021 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-76cc998948-wz9ls\" (UID: \"a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wz9ls" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.975060 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/2f112377-5fcb-424f-9fa1-f92ab0608d82-tls-secret\") pod \"cloudkitty-lokistack-gateway-76cc998948-wsd5z\" (UID: \"2f112377-5fcb-424f-9fa1-f92ab0608d82\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wsd5z" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.975080 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-76cc998948-wz9ls\" (UID: \"a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wz9ls" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.975124 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/2f112377-5fcb-424f-9fa1-f92ab0608d82-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-76cc998948-wsd5z\" (UID: \"2f112377-5fcb-424f-9fa1-f92ab0608d82\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wsd5z" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.975144 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4-tenants\") pod \"cloudkitty-lokistack-gateway-76cc998948-wz9ls\" (UID: \"a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wz9ls" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.975175 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4-rbac\") pod \"cloudkitty-lokistack-gateway-76cc998948-wz9ls\" (UID: \"a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wz9ls" Dec 02 18:56:13 crc kubenswrapper[4792]: E1202 18:56:13.975177 4792 secret.go:188] Couldn't get secret openstack/cloudkitty-lokistack-gateway-http: secret "cloudkitty-lokistack-gateway-http" not found Dec 02 18:56:13 crc kubenswrapper[4792]: E1202 18:56:13.975646 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2f112377-5fcb-424f-9fa1-f92ab0608d82-tls-secret 
podName:2f112377-5fcb-424f-9fa1-f92ab0608d82 nodeName:}" failed. No retries permitted until 2025-12-02 18:56:14.475627509 +0000 UTC m=+1205.248519837 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "tls-secret" (UniqueName: "kubernetes.io/secret/2f112377-5fcb-424f-9fa1-f92ab0608d82-tls-secret") pod "cloudkitty-lokistack-gateway-76cc998948-wsd5z" (UID: "2f112377-5fcb-424f-9fa1-f92ab0608d82") : secret "cloudkitty-lokistack-gateway-http" not found Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.975984 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-76cc998948-wz9ls\" (UID: \"a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wz9ls" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.975194 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2f112377-5fcb-424f-9fa1-f92ab0608d82-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-76cc998948-wsd5z\" (UID: \"2f112377-5fcb-424f-9fa1-f92ab0608d82\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wsd5z" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.976069 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2f112377-5fcb-424f-9fa1-f92ab0608d82-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-76cc998948-wsd5z\" (UID: \"2f112377-5fcb-424f-9fa1-f92ab0608d82\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wsd5z" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.976090 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-76cc998948-wz9ls\" (UID: \"a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wz9ls" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.976101 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4-rbac\") pod \"cloudkitty-lokistack-gateway-76cc998948-wz9ls\" (UID: \"a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wz9ls" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.976146 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/2f112377-5fcb-424f-9fa1-f92ab0608d82-tenants\") pod \"cloudkitty-lokistack-gateway-76cc998948-wsd5z\" (UID: \"2f112377-5fcb-424f-9fa1-f92ab0608d82\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wsd5z" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.976179 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2f112377-5fcb-424f-9fa1-f92ab0608d82-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-76cc998948-wsd5z\" (UID: \"2f112377-5fcb-424f-9fa1-f92ab0608d82\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wsd5z" Dec 02 18:56:13 crc kubenswrapper[4792]: 
I1202 18:56:13.976198 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4-tls-secret\") pod \"cloudkitty-lokistack-gateway-76cc998948-wz9ls\" (UID: \"a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wz9ls" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.976219 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-76cc998948-wz9ls\" (UID: \"a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wz9ls" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.976239 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2f112377-5fcb-424f-9fa1-f92ab0608d82-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-76cc998948-wsd5z\" (UID: \"2f112377-5fcb-424f-9fa1-f92ab0608d82\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wsd5z" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.976261 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kpd5p\" (UniqueName: \"kubernetes.io/projected/a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4-kube-api-access-kpd5p\") pod \"cloudkitty-lokistack-gateway-76cc998948-wz9ls\" (UID: \"a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wz9ls" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.976282 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-76cc998948-wz9ls\" (UID: \"a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wz9ls" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.976298 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/2f112377-5fcb-424f-9fa1-f92ab0608d82-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-76cc998948-wsd5z\" (UID: \"2f112377-5fcb-424f-9fa1-f92ab0608d82\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wsd5z" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.976319 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/2f112377-5fcb-424f-9fa1-f92ab0608d82-rbac\") pod \"cloudkitty-lokistack-gateway-76cc998948-wsd5z\" (UID: \"2f112377-5fcb-424f-9fa1-f92ab0608d82\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wsd5z" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.976676 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-76cc998948-wz9ls\" (UID: \"a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wz9ls" Dec 02 18:56:13 crc kubenswrapper[4792]: E1202 18:56:13.976982 4792 secret.go:188] 
Couldn't get secret openstack/cloudkitty-lokistack-gateway-http: secret "cloudkitty-lokistack-gateway-http" not found Dec 02 18:56:13 crc kubenswrapper[4792]: E1202 18:56:13.977017 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4-tls-secret podName:a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4 nodeName:}" failed. No retries permitted until 2025-12-02 18:56:14.477008734 +0000 UTC m=+1205.249901062 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "tls-secret" (UniqueName: "kubernetes.io/secret/a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4-tls-secret") pod "cloudkitty-lokistack-gateway-76cc998948-wz9ls" (UID: "a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4") : secret "cloudkitty-lokistack-gateway-http" not found Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.977059 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/2f112377-5fcb-424f-9fa1-f92ab0608d82-rbac\") pod \"cloudkitty-lokistack-gateway-76cc998948-wsd5z\" (UID: \"2f112377-5fcb-424f-9fa1-f92ab0608d82\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wsd5z" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.977320 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2f112377-5fcb-424f-9fa1-f92ab0608d82-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-76cc998948-wsd5z\" (UID: \"2f112377-5fcb-424f-9fa1-f92ab0608d82\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wsd5z" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.977808 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2f112377-5fcb-424f-9fa1-f92ab0608d82-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-76cc998948-wsd5z\" (UID: \"2f112377-5fcb-424f-9fa1-f92ab0608d82\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wsd5z" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.977897 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/2f112377-5fcb-424f-9fa1-f92ab0608d82-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-76cc998948-wsd5z\" (UID: \"2f112377-5fcb-424f-9fa1-f92ab0608d82\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wsd5z" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.978099 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-76cc998948-wz9ls\" (UID: \"a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wz9ls" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.978507 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-76cc998948-wz9ls\" (UID: \"a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wz9ls" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.978967 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tenants\" (UniqueName: 
\"kubernetes.io/secret/2f112377-5fcb-424f-9fa1-f92ab0608d82-tenants\") pod \"cloudkitty-lokistack-gateway-76cc998948-wsd5z\" (UID: \"2f112377-5fcb-424f-9fa1-f92ab0608d82\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wsd5z" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.981201 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-76cc998948-wz9ls\" (UID: \"a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wz9ls" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.981252 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4-tenants\") pod \"cloudkitty-lokistack-gateway-76cc998948-wz9ls\" (UID: \"a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wz9ls" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.981660 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/2f112377-5fcb-424f-9fa1-f92ab0608d82-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-76cc998948-wsd5z\" (UID: \"2f112377-5fcb-424f-9fa1-f92ab0608d82\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wsd5z" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.997922 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kpd5p\" (UniqueName: \"kubernetes.io/projected/a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4-kube-api-access-kpd5p\") pod \"cloudkitty-lokistack-gateway-76cc998948-wz9ls\" (UID: \"a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wz9ls" Dec 02 18:56:13 crc kubenswrapper[4792]: I1202 18:56:13.998796 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-czx8s\" (UniqueName: \"kubernetes.io/projected/2f112377-5fcb-424f-9fa1-f92ab0608d82-kube-api-access-czx8s\") pod \"cloudkitty-lokistack-gateway-76cc998948-wsd5z\" (UID: \"2f112377-5fcb-424f-9fa1-f92ab0608d82\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wsd5z" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.483373 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4-tls-secret\") pod \"cloudkitty-lokistack-gateway-76cc998948-wz9ls\" (UID: \"a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wz9ls" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.483472 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/2f112377-5fcb-424f-9fa1-f92ab0608d82-tls-secret\") pod \"cloudkitty-lokistack-gateway-76cc998948-wsd5z\" (UID: \"2f112377-5fcb-424f-9fa1-f92ab0608d82\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wsd5z" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.487352 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/2f112377-5fcb-424f-9fa1-f92ab0608d82-tls-secret\") pod \"cloudkitty-lokistack-gateway-76cc998948-wsd5z\" (UID: 
\"2f112377-5fcb-424f-9fa1-f92ab0608d82\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wsd5z" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.501106 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4-tls-secret\") pod \"cloudkitty-lokistack-gateway-76cc998948-wz9ls\" (UID: \"a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wz9ls" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.506344 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-ingester-0"] Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.507385 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-ingester-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.510126 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-ingester-http" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.510323 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-ingester-grpc" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.527233 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-ingester-0"] Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.602667 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-compactor-0"] Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.604313 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-compactor-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.606830 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-compactor-http" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.607001 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-compactor-grpc" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.617240 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-compactor-0"] Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.672746 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wz9ls" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.674581 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-index-gateway-0"] Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.675934 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.678100 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-index-gateway-grpc" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.678254 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-index-gateway-http" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.680951 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wsd5z" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.686512 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/63ad1bca-0ff4-4694-ab0a-56e8f5366d88-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"63ad1bca-0ff4-4694-ab0a-56e8f5366d88\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.686611 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/63ad1bca-0ff4-4694-ab0a-56e8f5366d88-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"63ad1bca-0ff4-4694-ab0a-56e8f5366d88\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.686654 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"63ad1bca-0ff4-4694-ab0a-56e8f5366d88\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.686681 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8kx6h\" (UniqueName: \"kubernetes.io/projected/63ad1bca-0ff4-4694-ab0a-56e8f5366d88-kube-api-access-8kx6h\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"63ad1bca-0ff4-4694-ab0a-56e8f5366d88\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.686704 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"63ad1bca-0ff4-4694-ab0a-56e8f5366d88\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.686729 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63ad1bca-0ff4-4694-ab0a-56e8f5366d88-config\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"63ad1bca-0ff4-4694-ab0a-56e8f5366d88\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.686849 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ingester-grpc\" (UniqueName: \"kubernetes.io/secret/63ad1bca-0ff4-4694-ab0a-56e8f5366d88-cloudkitty-lokistack-ingester-grpc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"63ad1bca-0ff4-4694-ab0a-56e8f5366d88\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.686876 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ingester-http\" (UniqueName: \"kubernetes.io/secret/63ad1bca-0ff4-4694-ab0a-56e8f5366d88-cloudkitty-lokistack-ingester-http\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"63ad1bca-0ff4-4694-ab0a-56e8f5366d88\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.698182 4792 kubelet.go:2428] "SyncLoop 
UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-index-gateway-0"] Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.788296 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/eab5f6f8-38fe-40ac-8407-4fc5044eba84-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"eab5f6f8-38fe-40ac-8407-4fc5044eba84\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.788368 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-compactor-http\" (UniqueName: \"kubernetes.io/secret/eab5f6f8-38fe-40ac-8407-4fc5044eba84-cloudkitty-lokistack-compactor-http\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"eab5f6f8-38fe-40ac-8407-4fc5044eba84\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.788400 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-index-gateway-http\" (UniqueName: \"kubernetes.io/secret/92495185-21e2-4db2-9b49-6c2b0267c324-cloudkitty-lokistack-index-gateway-http\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"92495185-21e2-4db2-9b49-6c2b0267c324\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.788433 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"92495185-21e2-4db2-9b49-6c2b0267c324\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.788460 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eab5f6f8-38fe-40ac-8407-4fc5044eba84-config\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"eab5f6f8-38fe-40ac-8407-4fc5044eba84\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.788814 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ingester-grpc\" (UniqueName: \"kubernetes.io/secret/63ad1bca-0ff4-4694-ab0a-56e8f5366d88-cloudkitty-lokistack-ingester-grpc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"63ad1bca-0ff4-4694-ab0a-56e8f5366d88\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.788853 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ingester-http\" (UniqueName: \"kubernetes.io/secret/63ad1bca-0ff4-4694-ab0a-56e8f5366d88-cloudkitty-lokistack-ingester-http\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"63ad1bca-0ff4-4694-ab0a-56e8f5366d88\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.788883 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/92495185-21e2-4db2-9b49-6c2b0267c324-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"92495185-21e2-4db2-9b49-6c2b0267c324\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 02 18:56:14 crc 
kubenswrapper[4792]: I1202 18:56:14.788938 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"eab5f6f8-38fe-40ac-8407-4fc5044eba84\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.788971 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-compactor-grpc\" (UniqueName: \"kubernetes.io/secret/eab5f6f8-38fe-40ac-8407-4fc5044eba84-cloudkitty-lokistack-compactor-grpc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"eab5f6f8-38fe-40ac-8407-4fc5044eba84\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.789049 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/63ad1bca-0ff4-4694-ab0a-56e8f5366d88-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"63ad1bca-0ff4-4694-ab0a-56e8f5366d88\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.789127 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hndpf\" (UniqueName: \"kubernetes.io/projected/92495185-21e2-4db2-9b49-6c2b0267c324-kube-api-access-hndpf\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"92495185-21e2-4db2-9b49-6c2b0267c324\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.789178 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/63ad1bca-0ff4-4694-ab0a-56e8f5366d88-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"63ad1bca-0ff4-4694-ab0a-56e8f5366d88\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.789197 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92495185-21e2-4db2-9b49-6c2b0267c324-config\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"92495185-21e2-4db2-9b49-6c2b0267c324\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.789240 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/92495185-21e2-4db2-9b49-6c2b0267c324-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"92495185-21e2-4db2-9b49-6c2b0267c324\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.789257 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-index-gateway-grpc\" (UniqueName: \"kubernetes.io/secret/92495185-21e2-4db2-9b49-6c2b0267c324-cloudkitty-lokistack-index-gateway-grpc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"92495185-21e2-4db2-9b49-6c2b0267c324\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.789285 4792 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"63ad1bca-0ff4-4694-ab0a-56e8f5366d88\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.789302 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/eab5f6f8-38fe-40ac-8407-4fc5044eba84-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"eab5f6f8-38fe-40ac-8407-4fc5044eba84\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.789327 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8kx6h\" (UniqueName: \"kubernetes.io/projected/63ad1bca-0ff4-4694-ab0a-56e8f5366d88-kube-api-access-8kx6h\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"63ad1bca-0ff4-4694-ab0a-56e8f5366d88\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.789351 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"63ad1bca-0ff4-4694-ab0a-56e8f5366d88\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.789373 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-655ks\" (UniqueName: \"kubernetes.io/projected/eab5f6f8-38fe-40ac-8407-4fc5044eba84-kube-api-access-655ks\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"eab5f6f8-38fe-40ac-8407-4fc5044eba84\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.789391 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63ad1bca-0ff4-4694-ab0a-56e8f5366d88-config\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"63ad1bca-0ff4-4694-ab0a-56e8f5366d88\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.790435 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63ad1bca-0ff4-4694-ab0a-56e8f5366d88-config\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"63ad1bca-0ff4-4694-ab0a-56e8f5366d88\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.791443 4792 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"63ad1bca-0ff4-4694-ab0a-56e8f5366d88\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/cloudkitty-lokistack-ingester-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.791488 4792 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"63ad1bca-0ff4-4694-ab0a-56e8f5366d88\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/cloudkitty-lokistack-ingester-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.791806 4792 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/63ad1bca-0ff4-4694-ab0a-56e8f5366d88-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"63ad1bca-0ff4-4694-ab0a-56e8f5366d88\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.806127 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/63ad1bca-0ff4-4694-ab0a-56e8f5366d88-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"63ad1bca-0ff4-4694-ab0a-56e8f5366d88\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.806633 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ingester-http\" (UniqueName: \"kubernetes.io/secret/63ad1bca-0ff4-4694-ab0a-56e8f5366d88-cloudkitty-lokistack-ingester-http\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"63ad1bca-0ff4-4694-ab0a-56e8f5366d88\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.806701 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ingester-grpc\" (UniqueName: \"kubernetes.io/secret/63ad1bca-0ff4-4694-ab0a-56e8f5366d88-cloudkitty-lokistack-ingester-grpc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"63ad1bca-0ff4-4694-ab0a-56e8f5366d88\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.810843 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8kx6h\" (UniqueName: \"kubernetes.io/projected/63ad1bca-0ff4-4694-ab0a-56e8f5366d88-kube-api-access-8kx6h\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"63ad1bca-0ff4-4694-ab0a-56e8f5366d88\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.821247 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"63ad1bca-0ff4-4694-ab0a-56e8f5366d88\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.824187 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"63ad1bca-0ff4-4694-ab0a-56e8f5366d88\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.880262 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-lokistack-ingester-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.890546 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-655ks\" (UniqueName: \"kubernetes.io/projected/eab5f6f8-38fe-40ac-8407-4fc5044eba84-kube-api-access-655ks\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"eab5f6f8-38fe-40ac-8407-4fc5044eba84\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.890650 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/eab5f6f8-38fe-40ac-8407-4fc5044eba84-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"eab5f6f8-38fe-40ac-8407-4fc5044eba84\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.890685 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-compactor-http\" (UniqueName: \"kubernetes.io/secret/eab5f6f8-38fe-40ac-8407-4fc5044eba84-cloudkitty-lokistack-compactor-http\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"eab5f6f8-38fe-40ac-8407-4fc5044eba84\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.890705 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-index-gateway-http\" (UniqueName: \"kubernetes.io/secret/92495185-21e2-4db2-9b49-6c2b0267c324-cloudkitty-lokistack-index-gateway-http\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"92495185-21e2-4db2-9b49-6c2b0267c324\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.890729 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"92495185-21e2-4db2-9b49-6c2b0267c324\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.890753 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eab5f6f8-38fe-40ac-8407-4fc5044eba84-config\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"eab5f6f8-38fe-40ac-8407-4fc5044eba84\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.890791 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/92495185-21e2-4db2-9b49-6c2b0267c324-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"92495185-21e2-4db2-9b49-6c2b0267c324\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.890810 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"eab5f6f8-38fe-40ac-8407-4fc5044eba84\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.890829 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-compactor-grpc\" (UniqueName: 
\"kubernetes.io/secret/eab5f6f8-38fe-40ac-8407-4fc5044eba84-cloudkitty-lokistack-compactor-grpc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"eab5f6f8-38fe-40ac-8407-4fc5044eba84\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.890879 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hndpf\" (UniqueName: \"kubernetes.io/projected/92495185-21e2-4db2-9b49-6c2b0267c324-kube-api-access-hndpf\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"92495185-21e2-4db2-9b49-6c2b0267c324\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.890905 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92495185-21e2-4db2-9b49-6c2b0267c324-config\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"92495185-21e2-4db2-9b49-6c2b0267c324\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.890927 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/92495185-21e2-4db2-9b49-6c2b0267c324-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"92495185-21e2-4db2-9b49-6c2b0267c324\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.890944 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-index-gateway-grpc\" (UniqueName: \"kubernetes.io/secret/92495185-21e2-4db2-9b49-6c2b0267c324-cloudkitty-lokistack-index-gateway-grpc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"92495185-21e2-4db2-9b49-6c2b0267c324\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.890963 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/eab5f6f8-38fe-40ac-8407-4fc5044eba84-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"eab5f6f8-38fe-40ac-8407-4fc5044eba84\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.891621 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/eab5f6f8-38fe-40ac-8407-4fc5044eba84-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"eab5f6f8-38fe-40ac-8407-4fc5044eba84\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.892368 4792 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"eab5f6f8-38fe-40ac-8407-4fc5044eba84\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/cloudkitty-lokistack-compactor-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.893804 4792 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"92495185-21e2-4db2-9b49-6c2b0267c324\") device mount path 
\"/mnt/openstack/pv01\"" pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.894658 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/92495185-21e2-4db2-9b49-6c2b0267c324-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"92495185-21e2-4db2-9b49-6c2b0267c324\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.895977 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92495185-21e2-4db2-9b49-6c2b0267c324-config\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"92495185-21e2-4db2-9b49-6c2b0267c324\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.896394 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eab5f6f8-38fe-40ac-8407-4fc5044eba84-config\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"eab5f6f8-38fe-40ac-8407-4fc5044eba84\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.896428 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/eab5f6f8-38fe-40ac-8407-4fc5044eba84-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"eab5f6f8-38fe-40ac-8407-4fc5044eba84\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.900262 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-index-gateway-grpc\" (UniqueName: \"kubernetes.io/secret/92495185-21e2-4db2-9b49-6c2b0267c324-cloudkitty-lokistack-index-gateway-grpc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"92495185-21e2-4db2-9b49-6c2b0267c324\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.900874 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-compactor-http\" (UniqueName: \"kubernetes.io/secret/eab5f6f8-38fe-40ac-8407-4fc5044eba84-cloudkitty-lokistack-compactor-http\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"eab5f6f8-38fe-40ac-8407-4fc5044eba84\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.904716 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/92495185-21e2-4db2-9b49-6c2b0267c324-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"92495185-21e2-4db2-9b49-6c2b0267c324\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.904880 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-compactor-grpc\" (UniqueName: \"kubernetes.io/secret/eab5f6f8-38fe-40ac-8407-4fc5044eba84-cloudkitty-lokistack-compactor-grpc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"eab5f6f8-38fe-40ac-8407-4fc5044eba84\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.907328 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hndpf\" (UniqueName: 
\"kubernetes.io/projected/92495185-21e2-4db2-9b49-6c2b0267c324-kube-api-access-hndpf\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"92495185-21e2-4db2-9b49-6c2b0267c324\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.908846 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-655ks\" (UniqueName: \"kubernetes.io/projected/eab5f6f8-38fe-40ac-8407-4fc5044eba84-kube-api-access-655ks\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"eab5f6f8-38fe-40ac-8407-4fc5044eba84\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.913842 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-index-gateway-http\" (UniqueName: \"kubernetes.io/secret/92495185-21e2-4db2-9b49-6c2b0267c324-cloudkitty-lokistack-index-gateway-http\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"92495185-21e2-4db2-9b49-6c2b0267c324\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.924554 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"eab5f6f8-38fe-40ac-8407-4fc5044eba84\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.930219 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-compactor-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.934312 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"92495185-21e2-4db2-9b49-6c2b0267c324\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 02 18:56:14 crc kubenswrapper[4792]: I1202 18:56:14.993640 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 02 18:56:30 crc kubenswrapper[4792]: E1202 18:56:30.053672 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Dec 02 18:56:30 crc kubenswrapper[4792]: E1202 18:56:30.054387 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-gdbx6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-server-0_openstack(c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 18:56:30 crc kubenswrapper[4792]: E1202 18:56:30.055618 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" 
pod="openstack/rabbitmq-server-0" podUID="c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba" Dec 02 18:56:30 crc kubenswrapper[4792]: E1202 18:56:30.060685 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Dec 02 18:56:30 crc kubenswrapper[4792]: E1202 18:56:30.060860 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-756l7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cell1-server-0_openstack(3d0661cf-534a-4951-9e56-7db65fdfd242): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 18:56:30 crc kubenswrapper[4792]: E1202 18:56:30.062055 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" 
pod="openstack/rabbitmq-cell1-server-0" podUID="3d0661cf-534a-4951-9e56-7db65fdfd242" Dec 02 18:56:30 crc kubenswrapper[4792]: E1202 18:56:30.763445 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="openstack/rabbitmq-server-0" podUID="c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba" Dec 02 18:56:30 crc kubenswrapper[4792]: E1202 18:56:30.763453 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="openstack/rabbitmq-cell1-server-0" podUID="3d0661cf-534a-4951-9e56-7db65fdfd242" Dec 02 18:56:30 crc kubenswrapper[4792]: E1202 18:56:30.930743 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 02 18:56:30 crc kubenswrapper[4792]: E1202 18:56:30.931269 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-b47vl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-cnzmz_openstack(9e0c811b-ea81-495e-b33d-6bc8419830d1): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 18:56:30 crc kubenswrapper[4792]: E1202 18:56:30.932611 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" 
for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-cnzmz" podUID="9e0c811b-ea81-495e-b33d-6bc8419830d1" Dec 02 18:56:30 crc kubenswrapper[4792]: E1202 18:56:30.936671 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 02 18:56:30 crc kubenswrapper[4792]: E1202 18:56:30.936883 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-k8j8f,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-666b6646f7-fjdkj_openstack(4b100fb8-4ab3-4514-840a-98e861a2cc11): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 18:56:30 crc kubenswrapper[4792]: E1202 18:56:30.938719 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-666b6646f7-fjdkj" podUID="4b100fb8-4ab3-4514-840a-98e861a2cc11" Dec 02 18:56:30 crc kubenswrapper[4792]: E1202 18:56:30.964485 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 02 18:56:30 crc kubenswrapper[4792]: E1202 
18:56:30.964661 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bgfkw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-hd9fc_openstack(17e12315-5bc5-4986-a5cd-7e788575f03b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 18:56:30 crc kubenswrapper[4792]: E1202 18:56:30.966004 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-hd9fc" podUID="17e12315-5bc5-4986-a5cd-7e788575f03b" Dec 02 18:56:30 crc kubenswrapper[4792]: E1202 18:56:30.991042 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 02 18:56:30 crc kubenswrapper[4792]: E1202 18:56:30.991228 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tnvwn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-57d769cc4f-9hhkl_openstack(6f2d96d0-f671-46cd-8e95-162a0773470d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 18:56:30 crc kubenswrapper[4792]: E1202 18:56:30.993156 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-57d769cc4f-9hhkl" podUID="6f2d96d0-f671-46cd-8e95-162a0773470d" Dec 02 18:56:31 crc kubenswrapper[4792]: E1202 18:56:31.769017 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-666b6646f7-fjdkj" podUID="4b100fb8-4ab3-4514-840a-98e861a2cc11" Dec 02 18:56:31 crc kubenswrapper[4792]: E1202 18:56:31.787658 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-57d769cc4f-9hhkl" podUID="6f2d96d0-f671-46cd-8e95-162a0773470d" Dec 02 18:56:33 crc kubenswrapper[4792]: E1202 18:56:33.352463 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb:current-podified" Dec 02 18:56:33 crc kubenswrapper[4792]: E1202 18:56:33.353331 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[bash 
/var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2wvpt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-galera-0_openstack(0c5e3683-f4d1-4f32-8c6d-ecc11415c660): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 18:56:33 crc kubenswrapper[4792]: E1202 18:56:33.357632 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-galera-0" podUID="0c5e3683-f4d1-4f32-8c6d-ecc11415c660" Dec 02 18:56:33 crc kubenswrapper[4792]: I1202 18:56:33.564388 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-cnzmz" Dec 02 18:56:33 crc kubenswrapper[4792]: I1202 18:56:33.565791 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-hd9fc" Dec 02 18:56:33 crc kubenswrapper[4792]: I1202 18:56:33.705379 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e0c811b-ea81-495e-b33d-6bc8419830d1-config\") pod \"9e0c811b-ea81-495e-b33d-6bc8419830d1\" (UID: \"9e0c811b-ea81-495e-b33d-6bc8419830d1\") " Dec 02 18:56:33 crc kubenswrapper[4792]: I1202 18:56:33.705602 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b47vl\" (UniqueName: \"kubernetes.io/projected/9e0c811b-ea81-495e-b33d-6bc8419830d1-kube-api-access-b47vl\") pod \"9e0c811b-ea81-495e-b33d-6bc8419830d1\" (UID: \"9e0c811b-ea81-495e-b33d-6bc8419830d1\") " Dec 02 18:56:33 crc kubenswrapper[4792]: I1202 18:56:33.705679 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bgfkw\" (UniqueName: \"kubernetes.io/projected/17e12315-5bc5-4986-a5cd-7e788575f03b-kube-api-access-bgfkw\") pod \"17e12315-5bc5-4986-a5cd-7e788575f03b\" (UID: \"17e12315-5bc5-4986-a5cd-7e788575f03b\") " Dec 02 18:56:33 crc kubenswrapper[4792]: I1202 18:56:33.705713 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/17e12315-5bc5-4986-a5cd-7e788575f03b-dns-svc\") pod \"17e12315-5bc5-4986-a5cd-7e788575f03b\" (UID: \"17e12315-5bc5-4986-a5cd-7e788575f03b\") " Dec 02 18:56:33 crc kubenswrapper[4792]: I1202 18:56:33.705790 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17e12315-5bc5-4986-a5cd-7e788575f03b-config\") pod \"17e12315-5bc5-4986-a5cd-7e788575f03b\" (UID: \"17e12315-5bc5-4986-a5cd-7e788575f03b\") " Dec 02 18:56:33 crc kubenswrapper[4792]: I1202 18:56:33.707681 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e0c811b-ea81-495e-b33d-6bc8419830d1-config" (OuterVolumeSpecName: "config") pod "9e0c811b-ea81-495e-b33d-6bc8419830d1" (UID: "9e0c811b-ea81-495e-b33d-6bc8419830d1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:56:33 crc kubenswrapper[4792]: I1202 18:56:33.711881 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e0c811b-ea81-495e-b33d-6bc8419830d1-kube-api-access-b47vl" (OuterVolumeSpecName: "kube-api-access-b47vl") pod "9e0c811b-ea81-495e-b33d-6bc8419830d1" (UID: "9e0c811b-ea81-495e-b33d-6bc8419830d1"). InnerVolumeSpecName "kube-api-access-b47vl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:56:33 crc kubenswrapper[4792]: I1202 18:56:33.712478 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/17e12315-5bc5-4986-a5cd-7e788575f03b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "17e12315-5bc5-4986-a5cd-7e788575f03b" (UID: "17e12315-5bc5-4986-a5cd-7e788575f03b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:56:33 crc kubenswrapper[4792]: I1202 18:56:33.712642 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/17e12315-5bc5-4986-a5cd-7e788575f03b-config" (OuterVolumeSpecName: "config") pod "17e12315-5bc5-4986-a5cd-7e788575f03b" (UID: "17e12315-5bc5-4986-a5cd-7e788575f03b"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:56:33 crc kubenswrapper[4792]: I1202 18:56:33.714687 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17e12315-5bc5-4986-a5cd-7e788575f03b-kube-api-access-bgfkw" (OuterVolumeSpecName: "kube-api-access-bgfkw") pod "17e12315-5bc5-4986-a5cd-7e788575f03b" (UID: "17e12315-5bc5-4986-a5cd-7e788575f03b"). InnerVolumeSpecName "kube-api-access-bgfkw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:56:33 crc kubenswrapper[4792]: I1202 18:56:33.792397 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-cnzmz" event={"ID":"9e0c811b-ea81-495e-b33d-6bc8419830d1","Type":"ContainerDied","Data":"fb9b35ac44a720a7c498b082694e588d04f00b7cfdba6e03d1ca6e7209f9ef61"} Dec 02 18:56:33 crc kubenswrapper[4792]: I1202 18:56:33.792484 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-cnzmz" Dec 02 18:56:33 crc kubenswrapper[4792]: I1202 18:56:33.797055 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-hd9fc" event={"ID":"17e12315-5bc5-4986-a5cd-7e788575f03b","Type":"ContainerDied","Data":"9a9b62a72444b55c6b91a1d245f15a54f6f5ca37ca21ccca6e0b959d66ce834c"} Dec 02 18:56:33 crc kubenswrapper[4792]: I1202 18:56:33.797071 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-hd9fc" Dec 02 18:56:33 crc kubenswrapper[4792]: I1202 18:56:33.807812 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b47vl\" (UniqueName: \"kubernetes.io/projected/9e0c811b-ea81-495e-b33d-6bc8419830d1-kube-api-access-b47vl\") on node \"crc\" DevicePath \"\"" Dec 02 18:56:33 crc kubenswrapper[4792]: I1202 18:56:33.807847 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bgfkw\" (UniqueName: \"kubernetes.io/projected/17e12315-5bc5-4986-a5cd-7e788575f03b-kube-api-access-bgfkw\") on node \"crc\" DevicePath \"\"" Dec 02 18:56:33 crc kubenswrapper[4792]: I1202 18:56:33.807863 4792 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/17e12315-5bc5-4986-a5cd-7e788575f03b-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 18:56:33 crc kubenswrapper[4792]: I1202 18:56:33.807876 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17e12315-5bc5-4986-a5cd-7e788575f03b-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:56:33 crc kubenswrapper[4792]: I1202 18:56:33.807909 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e0c811b-ea81-495e-b33d-6bc8419830d1-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:56:33 crc kubenswrapper[4792]: E1202 18:56:33.808021 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb:current-podified\\\"\"" pod="openstack/openstack-galera-0" podUID="0c5e3683-f4d1-4f32-8c6d-ecc11415c660" Dec 02 18:56:34 crc kubenswrapper[4792]: I1202 18:56:34.006628 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-cnzmz"] Dec 02 18:56:34 crc kubenswrapper[4792]: I1202 18:56:34.015858 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/dnsmasq-dns-675f4bcbfc-cnzmz"] Dec 02 18:56:34 crc kubenswrapper[4792]: I1202 18:56:34.037073 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-hd9fc"] Dec 02 18:56:34 crc kubenswrapper[4792]: I1202 18:56:34.037343 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-hd9fc"] Dec 02 18:56:34 crc kubenswrapper[4792]: I1202 18:56:34.544610 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-querier-548665d79b-vwc9f"] Dec 02 18:56:34 crc kubenswrapper[4792]: I1202 18:56:34.565761 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-gateway-76cc998948-wz9ls"] Dec 02 18:56:34 crc kubenswrapper[4792]: W1202 18:56:34.572120 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod376a394c_12c9_4fa9_b24a_841a6b05ba0b.slice/crio-0f56d3262f1dae3e6e7d5d854d460d1b719ef75c297d60396bb8f11a8607b76e WatchSource:0}: Error finding container 0f56d3262f1dae3e6e7d5d854d460d1b719ef75c297d60396bb8f11a8607b76e: Status 404 returned error can't find the container with id 0f56d3262f1dae3e6e7d5d854d460d1b719ef75c297d60396bb8f11a8607b76e Dec 02 18:56:34 crc kubenswrapper[4792]: I1202 18:56:34.584616 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 02 18:56:34 crc kubenswrapper[4792]: I1202 18:56:34.600369 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 02 18:56:34 crc kubenswrapper[4792]: I1202 18:56:34.635810 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"] Dec 02 18:56:34 crc kubenswrapper[4792]: I1202 18:56:34.655365 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-ingester-0"] Dec 02 18:56:34 crc kubenswrapper[4792]: I1202 18:56:34.667268 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-s44lp"] Dec 02 18:56:34 crc kubenswrapper[4792]: W1202 18:56:34.674502 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod92495185_21e2_4db2_9b49_6c2b0267c324.slice/crio-0ca14148e8ff4f0df09c784461bd3fde99e280f488a1ccad96c0e0fdab0b4fb9 WatchSource:0}: Error finding container 0ca14148e8ff4f0df09c784461bd3fde99e280f488a1ccad96c0e0fdab0b4fb9: Status 404 returned error can't find the container with id 0ca14148e8ff4f0df09c784461bd3fde99e280f488a1ccad96c0e0fdab0b4fb9 Dec 02 18:56:34 crc kubenswrapper[4792]: I1202 18:56:34.674596 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-index-gateway-0"] Dec 02 18:56:34 crc kubenswrapper[4792]: I1202 18:56:34.680766 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 02 18:56:34 crc kubenswrapper[4792]: W1202 18:56:34.683098 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2f112377_5fcb_424f_9fa1_f92ab0608d82.slice/crio-06a6ae6f5bb3d485ba7b9b4cda005a0ff79612630c76329034b9fe65ea10f387 WatchSource:0}: Error finding container 06a6ae6f5bb3d485ba7b9b4cda005a0ff79612630c76329034b9fe65ea10f387: Status 404 returned error can't find the container with id 06a6ae6f5bb3d485ba7b9b4cda005a0ff79612630c76329034b9fe65ea10f387 Dec 02 18:56:34 crc kubenswrapper[4792]: I1202 18:56:34.685955 
4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 02 18:56:34 crc kubenswrapper[4792]: E1202 18:56:34.689832 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-state-metrics,Image:registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0,Command:[],Args:[--resources=pods --namespaces=openstack],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:http-metrics,HostPort:0,ContainerPort:8080,Protocol:TCP,HostIP:,},ContainerPort{Name:telemetry,HostPort:0,ContainerPort:8081,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-lqrdh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/livez,Port:{0 8080 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod kube-state-metrics-0_openstack(3a9324ab-6fb7-4057-bf68-e74e0907aa80): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 02 18:56:34 crc kubenswrapper[4792]: E1202 18:56:34.691065 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-state-metrics\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack/kube-state-metrics-0" podUID="3a9324ab-6fb7-4057-bf68-e74e0907aa80" Dec 02 18:56:34 crc kubenswrapper[4792]: I1202 18:56:34.694303 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-gateway-76cc998948-wsd5z"] Dec 02 18:56:34 crc kubenswrapper[4792]: I1202 18:56:34.811329 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"3a9324ab-6fb7-4057-bf68-e74e0907aa80","Type":"ContainerStarted","Data":"6e4623a6ce87d895481d98f1f68512eab5a5bba03faa5351917d106a0595748d"} Dec 02 18:56:34 crc kubenswrapper[4792]: E1202 18:56:34.814338 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-state-metrics\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0\\\"\"" pod="openstack/kube-state-metrics-0" podUID="3a9324ab-6fb7-4057-bf68-e74e0907aa80" Dec 02 18:56:34 crc kubenswrapper[4792]: I1202 
18:56:34.815229 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-index-gateway-0" event={"ID":"92495185-21e2-4db2-9b49-6c2b0267c324","Type":"ContainerStarted","Data":"0ca14148e8ff4f0df09c784461bd3fde99e280f488a1ccad96c0e0fdab0b4fb9"} Dec 02 18:56:34 crc kubenswrapper[4792]: I1202 18:56:34.820794 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"26d86aa8-79aa-4d9b-ac24-155924920219","Type":"ContainerStarted","Data":"8ac781c228322da9fa16d25756d8dddd69b501ad4f9624ab259545c4fea8145b"} Dec 02 18:56:34 crc kubenswrapper[4792]: I1202 18:56:34.825354 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"91bf1c70-0d0f-49f9-aae7-59865a7abd26","Type":"ContainerStarted","Data":"6c306465fa7943abde0e957804f17fccb4b0442bfd59238f05adeb13172ad9f4"} Dec 02 18:56:34 crc kubenswrapper[4792]: I1202 18:56:34.834566 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"357feab9-6738-4c52-8478-0763a304671f","Type":"ContainerStarted","Data":"8d9ecf63bc9aa656be629a802e625b9ef643962e0fba09e89cfae70926543921"} Dec 02 18:56:34 crc kubenswrapper[4792]: I1202 18:56:34.837244 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wsd5z" event={"ID":"2f112377-5fcb-424f-9fa1-f92ab0608d82","Type":"ContainerStarted","Data":"06a6ae6f5bb3d485ba7b9b4cda005a0ff79612630c76329034b9fe65ea10f387"} Dec 02 18:56:34 crc kubenswrapper[4792]: I1202 18:56:34.840104 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-ingester-0" event={"ID":"63ad1bca-0ff4-4694-ab0a-56e8f5366d88","Type":"ContainerStarted","Data":"f4cc3a51298728e8e86410cfe178bc1aaaa2cb28e9c219ed31d7c1fa6061b6a2"} Dec 02 18:56:34 crc kubenswrapper[4792]: I1202 18:56:34.843509 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-s44lp" event={"ID":"2a5ad51d-6996-42c0-b156-600ff9dc7782","Type":"ContainerStarted","Data":"753248590e17c857e2b1a2a3877e84bd1cb99f5a3305107680d6d0c3c542c06d"} Dec 02 18:56:34 crc kubenswrapper[4792]: I1202 18:56:34.846931 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wz9ls" event={"ID":"a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4","Type":"ContainerStarted","Data":"a4aaa9710dd5c37920494630a64aaaabe85831000ff6a4185d31c080a12fedb7"} Dec 02 18:56:34 crc kubenswrapper[4792]: I1202 18:56:34.856589 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-gpsrm"] Dec 02 18:56:34 crc kubenswrapper[4792]: I1202 18:56:34.868917 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"2951974e-17c4-4cf6-b244-6efc7a6fc742","Type":"ContainerStarted","Data":"3cee3e6be35dcbb902b26a269b48323e18c03b8bbebf2d52013c4ea0a7827369"} Dec 02 18:56:34 crc kubenswrapper[4792]: I1202 18:56:34.880827 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-querier-548665d79b-vwc9f" event={"ID":"376a394c-12c9-4fa9-b24a-841a6b05ba0b","Type":"ContainerStarted","Data":"0f56d3262f1dae3e6e7d5d854d460d1b719ef75c297d60396bb8f11a8607b76e"} Dec 02 18:56:34 crc kubenswrapper[4792]: W1202 18:56:34.889548 4792 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd0abc322_ef0d_468b_9d23_4e2acd50b51a.slice/crio-ffc232c71e4d1098f8da79ef5d5df8b66410bdd16ab1df4ee9112e880779012f WatchSource:0}: Error finding container ffc232c71e4d1098f8da79ef5d5df8b66410bdd16ab1df4ee9112e880779012f: Status 404 returned error can't find the container with id ffc232c71e4d1098f8da79ef5d5df8b66410bdd16ab1df4ee9112e880779012f Dec 02 18:56:34 crc kubenswrapper[4792]: I1202 18:56:34.891392 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-compactor-0"] Dec 02 18:56:34 crc kubenswrapper[4792]: W1202 18:56:34.891915 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeab5f6f8_38fe_40ac_8407_4fc5044eba84.slice/crio-7b75936f1f2410242eb27cd58777b066511b69eebbf52b20b46cca0cd62e64b2 WatchSource:0}: Error finding container 7b75936f1f2410242eb27cd58777b066511b69eebbf52b20b46cca0cd62e64b2: Status 404 returned error can't find the container with id 7b75936f1f2410242eb27cd58777b066511b69eebbf52b20b46cca0cd62e64b2 Dec 02 18:56:34 crc kubenswrapper[4792]: E1202 18:56:34.896899 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:loki-compactor,Image:registry.redhat.io/openshift-logging/logging-loki-rhel9@sha256:14f37195a4957e3848690d0ffe5422be55f7599b30dfe1ee0f97eb1118a10a51,Command:[],Args:[-target=compactor -config.file=/etc/loki/config/config.yaml -runtime-config.file=/etc/loki/config/runtime-config.yaml -config.expand-env=true],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:3100,Protocol:TCP,HostIP:,},ContainerPort{Name:grpclb,HostPort:0,ContainerPort:9095,Protocol:TCP,HostIP:,},ContainerPort{Name:healthchecks,HostPort:0,ContainerPort:3101,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:AWS_ACCESS_KEY_ID,Value:,ValueFrom:&EnvVarSource{FieldRef:nil,ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:&SecretKeySelector{LocalObjectReference:LocalObjectReference{Name:cloudkitty-loki-s3,},Key:access_key_id,Optional:nil,},},},EnvVar{Name:AWS_ACCESS_KEY_SECRET,Value:,ValueFrom:&EnvVarSource{FieldRef:nil,ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:&SecretKeySelector{LocalObjectReference:LocalObjectReference{Name:cloudkitty-loki-s3,},Key:access_key_secret,Optional:nil,},},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:false,MountPath:/etc/loki/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:storage,ReadOnly:false,MountPath:/tmp/loki,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-compactor-http,ReadOnly:false,MountPath:/var/run/tls/http/server,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-loki-s3,ReadOnly:false,MountPath:/etc/storage/secrets,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-compactor-grpc,ReadOnly:false,MountPath:/var/run/tls/grpc/server,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-ca-bundle,ReadOnly:false,MountPath:/var/run/ca,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-655ks,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,
SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/loki/api/v1/status/buildinfo,Port:{0 3101 },Host:,Scheme:HTTPS,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:2,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/ready,Port:{0 3101 },Host:,Scheme:HTTPS,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cloudkitty-lokistack-compactor-0_openstack(eab5f6f8-38fe-40ac-8407-4fc5044eba84): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 02 18:56:34 crc kubenswrapper[4792]: E1202 18:56:34.898051 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"loki-compactor\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack/cloudkitty-lokistack-compactor-0" podUID="eab5f6f8-38fe-40ac-8407-4fc5044eba84" Dec 02 18:56:34 crc kubenswrapper[4792]: E1202 18:56:34.899441 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:loki-query-frontend,Image:registry.redhat.io/openshift-logging/logging-loki-rhel9@sha256:14f37195a4957e3848690d0ffe5422be55f7599b30dfe1ee0f97eb1118a10a51,Command:[],Args:[-target=query-frontend -config.file=/etc/loki/config/config.yaml -runtime-config.file=/etc/loki/config/runtime-config.yaml 
-config.expand-env=true],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:3100,Protocol:TCP,HostIP:,},ContainerPort{Name:grpclb,HostPort:0,ContainerPort:9095,Protocol:TCP,HostIP:,},ContainerPort{Name:healthchecks,HostPort:0,ContainerPort:3101,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:false,MountPath:/etc/loki/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-query-frontend-http,ReadOnly:false,MountPath:/var/run/tls/http/server,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-query-frontend-grpc,ReadOnly:false,MountPath:/var/run/tls/grpc/server,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-ca-bundle,ReadOnly:false,MountPath:/var/run/ca,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5rhp6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/loki/api/v1/status/buildinfo,Port:{0 3101 },Host:,Scheme:HTTPS,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:2,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/loki/api/v1/status/buildinfo,Port:{0 3101 },Host:,Scheme:HTTPS,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cloudkitty-lokistack-query-frontend-779849886d-n28fq_openstack(29663f5c-6fe7-42d5-8d53-c8d900e36a9c): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 02 18:56:34 crc kubenswrapper[4792]: E1202 18:56:34.900711 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"loki-query-frontend\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-n28fq" podUID="29663f5c-6fe7-42d5-8d53-c8d900e36a9c" Dec 02 18:56:34 crc kubenswrapper[4792]: E1202 18:56:34.900987 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:loki-distributor,Image:registry.redhat.io/openshift-logging/logging-loki-rhel9@sha256:14f37195a4957e3848690d0ffe5422be55f7599b30dfe1ee0f97eb1118a10a51,Command:[],Args:[-target=distributor -config.file=/etc/loki/config/config.yaml -runtime-config.file=/etc/loki/config/runtime-config.yaml 
-config.expand-env=true],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:3100,Protocol:TCP,HostIP:,},ContainerPort{Name:grpclb,HostPort:0,ContainerPort:9095,Protocol:TCP,HostIP:,},ContainerPort{Name:gossip-ring,HostPort:0,ContainerPort:7946,Protocol:TCP,HostIP:,},ContainerPort{Name:healthchecks,HostPort:0,ContainerPort:3101,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:false,MountPath:/etc/loki/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-distributor-http,ReadOnly:false,MountPath:/var/run/tls/http/server,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-distributor-grpc,ReadOnly:false,MountPath:/var/run/tls/grpc/server,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-ca-bundle,ReadOnly:false,MountPath:/var/run/ca,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nmsjr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/loki/api/v1/status/buildinfo,Port:{0 3101 },Host:,Scheme:HTTPS,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:2,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/ready,Port:{0 3101 },Host:,Scheme:HTTPS,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cloudkitty-lokistack-distributor-56cd74f89f-lgcfh_openstack(c11c6af2-cd99-41e8-b6cf-b86ab025bbfa): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 02 18:56:34 crc kubenswrapper[4792]: E1202 18:56:34.903075 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"loki-distributor\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-lgcfh" podUID="c11c6af2-cd99-41e8-b6cf-b86ab025bbfa" Dec 02 18:56:34 crc kubenswrapper[4792]: I1202 18:56:34.906022 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-query-frontend-779849886d-n28fq"] Dec 02 18:56:34 crc kubenswrapper[4792]: I1202 18:56:34.916965 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-distributor-56cd74f89f-lgcfh"] Dec 02 18:56:34 crc kubenswrapper[4792]: I1202 18:56:34.994581 4792 kubelet.go:2428] "SyncLoop 
UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 02 18:56:35 crc kubenswrapper[4792]: I1202 18:56:35.570938 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="17e12315-5bc5-4986-a5cd-7e788575f03b" path="/var/lib/kubelet/pods/17e12315-5bc5-4986-a5cd-7e788575f03b/volumes" Dec 02 18:56:35 crc kubenswrapper[4792]: I1202 18:56:35.571638 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e0c811b-ea81-495e-b33d-6bc8419830d1" path="/var/lib/kubelet/pods/9e0c811b-ea81-495e-b33d-6bc8419830d1/volumes" Dec 02 18:56:35 crc kubenswrapper[4792]: I1202 18:56:35.765315 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 02 18:56:35 crc kubenswrapper[4792]: W1202 18:56:35.774132 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6cfe9a05_cb43_47d3_84f8_95642cd098ec.slice/crio-7bd0a1f3aae8bca9e2104610a3f7971df439ee3ddbc04a18b8c51ae6cd5f0081 WatchSource:0}: Error finding container 7bd0a1f3aae8bca9e2104610a3f7971df439ee3ddbc04a18b8c51ae6cd5f0081: Status 404 returned error can't find the container with id 7bd0a1f3aae8bca9e2104610a3f7971df439ee3ddbc04a18b8c51ae6cd5f0081 Dec 02 18:56:35 crc kubenswrapper[4792]: I1202 18:56:35.891900 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-compactor-0" event={"ID":"eab5f6f8-38fe-40ac-8407-4fc5044eba84","Type":"ContainerStarted","Data":"7b75936f1f2410242eb27cd58777b066511b69eebbf52b20b46cca0cd62e64b2"} Dec 02 18:56:35 crc kubenswrapper[4792]: E1202 18:56:35.893551 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"loki-compactor\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/openshift-logging/logging-loki-rhel9@sha256:14f37195a4957e3848690d0ffe5422be55f7599b30dfe1ee0f97eb1118a10a51\\\"\"" pod="openstack/cloudkitty-lokistack-compactor-0" podUID="eab5f6f8-38fe-40ac-8407-4fc5044eba84" Dec 02 18:56:35 crc kubenswrapper[4792]: I1202 18:56:35.894278 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"db787f15-5115-48a8-9443-93f5da555d2a","Type":"ContainerStarted","Data":"093508c8c3835bc3a809c8c88f7fd662370d487084878195ff1bcc9f49aadaad"} Dec 02 18:56:35 crc kubenswrapper[4792]: I1202 18:56:35.898818 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"6cfe9a05-cb43-47d3-84f8-95642cd098ec","Type":"ContainerStarted","Data":"7bd0a1f3aae8bca9e2104610a3f7971df439ee3ddbc04a18b8c51ae6cd5f0081"} Dec 02 18:56:35 crc kubenswrapper[4792]: I1202 18:56:35.901721 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-gpsrm" event={"ID":"d0abc322-ef0d-468b-9d23-4e2acd50b51a","Type":"ContainerStarted","Data":"ffc232c71e4d1098f8da79ef5d5df8b66410bdd16ab1df4ee9112e880779012f"} Dec 02 18:56:35 crc kubenswrapper[4792]: I1202 18:56:35.903414 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-lgcfh" event={"ID":"c11c6af2-cd99-41e8-b6cf-b86ab025bbfa","Type":"ContainerStarted","Data":"8f5156b2a1d5e83f38fab638220710fcc30780f6891928c3205b4239046efd04"} Dec 02 18:56:35 crc kubenswrapper[4792]: E1202 18:56:35.905006 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"loki-distributor\" with ImagePullBackOff: \"Back-off pulling image 
\\\"registry.redhat.io/openshift-logging/logging-loki-rhel9@sha256:14f37195a4957e3848690d0ffe5422be55f7599b30dfe1ee0f97eb1118a10a51\\\"\"" pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-lgcfh" podUID="c11c6af2-cd99-41e8-b6cf-b86ab025bbfa" Dec 02 18:56:35 crc kubenswrapper[4792]: I1202 18:56:35.905242 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-n28fq" event={"ID":"29663f5c-6fe7-42d5-8d53-c8d900e36a9c","Type":"ContainerStarted","Data":"62d788cc91fc89a49bff61782b1e0aa0295605963840dda456b63acae12b9bfb"} Dec 02 18:56:35 crc kubenswrapper[4792]: E1202 18:56:35.911878 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"loki-query-frontend\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/openshift-logging/logging-loki-rhel9@sha256:14f37195a4957e3848690d0ffe5422be55f7599b30dfe1ee0f97eb1118a10a51\\\"\"" pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-n28fq" podUID="29663f5c-6fe7-42d5-8d53-c8d900e36a9c" Dec 02 18:56:35 crc kubenswrapper[4792]: I1202 18:56:35.918883 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"91bf1c70-0d0f-49f9-aae7-59865a7abd26","Type":"ContainerStarted","Data":"edf5babc0c4f9e1478a8abf57e1d59c7f9a268f06013f2d1cf056d868c7a6d36"} Dec 02 18:56:35 crc kubenswrapper[4792]: E1202 18:56:35.941514 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-state-metrics\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0\\\"\"" pod="openstack/kube-state-metrics-0" podUID="3a9324ab-6fb7-4057-bf68-e74e0907aa80" Dec 02 18:56:36 crc kubenswrapper[4792]: E1202 18:56:36.925668 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"loki-compactor\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/openshift-logging/logging-loki-rhel9@sha256:14f37195a4957e3848690d0ffe5422be55f7599b30dfe1ee0f97eb1118a10a51\\\"\"" pod="openstack/cloudkitty-lokistack-compactor-0" podUID="eab5f6f8-38fe-40ac-8407-4fc5044eba84" Dec 02 18:56:36 crc kubenswrapper[4792]: E1202 18:56:36.929834 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"loki-distributor\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/openshift-logging/logging-loki-rhel9@sha256:14f37195a4957e3848690d0ffe5422be55f7599b30dfe1ee0f97eb1118a10a51\\\"\"" pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-lgcfh" podUID="c11c6af2-cd99-41e8-b6cf-b86ab025bbfa" Dec 02 18:56:36 crc kubenswrapper[4792]: E1202 18:56:36.929890 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"loki-query-frontend\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/openshift-logging/logging-loki-rhel9@sha256:14f37195a4957e3848690d0ffe5422be55f7599b30dfe1ee0f97eb1118a10a51\\\"\"" pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-n28fq" podUID="29663f5c-6fe7-42d5-8d53-c8d900e36a9c" Dec 02 18:56:38 crc kubenswrapper[4792]: I1202 18:56:38.948126 4792 generic.go:334] "Generic (PLEG): container finished" podID="91bf1c70-0d0f-49f9-aae7-59865a7abd26" containerID="edf5babc0c4f9e1478a8abf57e1d59c7f9a268f06013f2d1cf056d868c7a6d36" exitCode=0 Dec 02 18:56:38 crc kubenswrapper[4792]: I1202 18:56:38.948440 4792 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"91bf1c70-0d0f-49f9-aae7-59865a7abd26","Type":"ContainerDied","Data":"edf5babc0c4f9e1478a8abf57e1d59c7f9a268f06013f2d1cf056d868c7a6d36"} Dec 02 18:56:44 crc kubenswrapper[4792]: I1202 18:56:44.017421 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-s44lp" event={"ID":"2a5ad51d-6996-42c0-b156-600ff9dc7782","Type":"ContainerStarted","Data":"052c92fc8f731d9870f5c7857b56da176864fd151414b65fe72d57379cac6996"} Dec 02 18:56:44 crc kubenswrapper[4792]: I1202 18:56:44.018116 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-s44lp" Dec 02 18:56:44 crc kubenswrapper[4792]: I1202 18:56:44.019653 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-index-gateway-0" event={"ID":"92495185-21e2-4db2-9b49-6c2b0267c324","Type":"ContainerStarted","Data":"223644feba9302490cb104883a77eeadff8924fe8f368acd3db2f0e52fecdb54"} Dec 02 18:56:44 crc kubenswrapper[4792]: I1202 18:56:44.019934 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 02 18:56:44 crc kubenswrapper[4792]: I1202 18:56:44.023355 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"91bf1c70-0d0f-49f9-aae7-59865a7abd26","Type":"ContainerStarted","Data":"e93773004d36c3cb57426ae331ce2b9db9b8a8ab9b54d82dd666d1c29e9c3a49"} Dec 02 18:56:44 crc kubenswrapper[4792]: I1202 18:56:44.026151 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"2951974e-17c4-4cf6-b244-6efc7a6fc742","Type":"ContainerStarted","Data":"03924dc1852593b7007e52115c971ebb86eaa7d44da188ed4a0aa7463354c4d1"} Dec 02 18:56:44 crc kubenswrapper[4792]: I1202 18:56:44.026414 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Dec 02 18:56:44 crc kubenswrapper[4792]: I1202 18:56:44.029345 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wsd5z" event={"ID":"2f112377-5fcb-424f-9fa1-f92ab0608d82","Type":"ContainerStarted","Data":"8486dcb059c98a0087dd27fe2c85a8425e9f08fd21f7ac2a5ce74e6d5bd0ae94"} Dec 02 18:56:44 crc kubenswrapper[4792]: I1202 18:56:44.029595 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wsd5z" Dec 02 18:56:44 crc kubenswrapper[4792]: I1202 18:56:44.032685 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-querier-548665d79b-vwc9f" event={"ID":"376a394c-12c9-4fa9-b24a-841a6b05ba0b","Type":"ContainerStarted","Data":"93d425504d6b9ac374ca31f8032b17bfa4e970d3b7528db2c88a311395c2ca7e"} Dec 02 18:56:44 crc kubenswrapper[4792]: I1202 18:56:44.037626 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wz9ls" event={"ID":"a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4","Type":"ContainerStarted","Data":"518f17c05fb5eab0b096ee7465dd461b615052705401a97102602f26d783248d"} Dec 02 18:56:44 crc kubenswrapper[4792]: I1202 18:56:44.037837 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wz9ls" Dec 02 18:56:44 crc kubenswrapper[4792]: I1202 18:56:44.045178 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-s44lp" podStartSLOduration=28.794449357 
podStartE2EDuration="36.045155349s" podCreationTimestamp="2025-12-02 18:56:08 +0000 UTC" firstStartedPulling="2025-12-02 18:56:34.635460044 +0000 UTC m=+1225.408352372" lastFinishedPulling="2025-12-02 18:56:41.886165996 +0000 UTC m=+1232.659058364" observedRunningTime="2025-12-02 18:56:44.040393775 +0000 UTC m=+1234.813286143" watchObservedRunningTime="2025-12-02 18:56:44.045155349 +0000 UTC m=+1234.818047717" Dec 02 18:56:44 crc kubenswrapper[4792]: I1202 18:56:44.047820 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-ingester-0" event={"ID":"63ad1bca-0ff4-4694-ab0a-56e8f5366d88","Type":"ContainerStarted","Data":"28a1eb9246550e6b8522b9b5e56abc8369917d320733080653cf386d3e4abe3b"} Dec 02 18:56:44 crc kubenswrapper[4792]: I1202 18:56:44.048655 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-ingester-0" Dec 02 18:56:44 crc kubenswrapper[4792]: I1202 18:56:44.049026 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wsd5z" Dec 02 18:56:44 crc kubenswrapper[4792]: I1202 18:56:44.051206 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-gpsrm" event={"ID":"d0abc322-ef0d-468b-9d23-4e2acd50b51a","Type":"ContainerStarted","Data":"b32acc841701681fc21c07b1c06bdd524607e88c74698043dd89bb7a2db1128b"} Dec 02 18:56:44 crc kubenswrapper[4792]: I1202 18:56:44.087188 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wz9ls" Dec 02 18:56:44 crc kubenswrapper[4792]: I1202 18:56:44.123277 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wz9ls" podStartSLOduration=23.822232866 podStartE2EDuration="31.123249972s" podCreationTimestamp="2025-12-02 18:56:13 +0000 UTC" firstStartedPulling="2025-12-02 18:56:34.566107966 +0000 UTC m=+1225.339000294" lastFinishedPulling="2025-12-02 18:56:41.867125032 +0000 UTC m=+1232.640017400" observedRunningTime="2025-12-02 18:56:44.110758959 +0000 UTC m=+1234.883651307" watchObservedRunningTime="2025-12-02 18:56:44.123249972 +0000 UTC m=+1234.896142320" Dec 02 18:56:44 crc kubenswrapper[4792]: I1202 18:56:44.138153 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-querier-548665d79b-vwc9f" podStartSLOduration=23.95389908 podStartE2EDuration="31.138124558s" podCreationTimestamp="2025-12-02 18:56:13 +0000 UTC" firstStartedPulling="2025-12-02 18:56:34.61517489 +0000 UTC m=+1225.388067218" lastFinishedPulling="2025-12-02 18:56:41.799400368 +0000 UTC m=+1232.572292696" observedRunningTime="2025-12-02 18:56:44.079850328 +0000 UTC m=+1234.852742696" watchObservedRunningTime="2025-12-02 18:56:44.138124558 +0000 UTC m=+1234.911016906" Dec 02 18:56:44 crc kubenswrapper[4792]: I1202 18:56:44.145106 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=43.677355348 podStartE2EDuration="44.145083318s" podCreationTimestamp="2025-12-02 18:56:00 +0000 UTC" firstStartedPulling="2025-12-02 18:56:34.63567844 +0000 UTC m=+1225.408570768" lastFinishedPulling="2025-12-02 18:56:35.10340638 +0000 UTC m=+1225.876298738" observedRunningTime="2025-12-02 18:56:44.138558699 +0000 UTC m=+1234.911451047" watchObservedRunningTime="2025-12-02 18:56:44.145083318 +0000 UTC m=+1234.917975646" Dec 02 18:56:44 crc 
kubenswrapper[4792]: I1202 18:56:44.166609 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-index-gateway-0" podStartSLOduration=24.047754785 podStartE2EDuration="31.166591975s" podCreationTimestamp="2025-12-02 18:56:13 +0000 UTC" firstStartedPulling="2025-12-02 18:56:34.680561968 +0000 UTC m=+1225.453454296" lastFinishedPulling="2025-12-02 18:56:41.799399147 +0000 UTC m=+1232.572291486" observedRunningTime="2025-12-02 18:56:44.1613677 +0000 UTC m=+1234.934260018" watchObservedRunningTime="2025-12-02 18:56:44.166591975 +0000 UTC m=+1234.939484303" Dec 02 18:56:44 crc kubenswrapper[4792]: I1202 18:56:44.175979 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-gateway-76cc998948-wsd5z" podStartSLOduration=24.368889152 podStartE2EDuration="31.175960928s" podCreationTimestamp="2025-12-02 18:56:13 +0000 UTC" firstStartedPulling="2025-12-02 18:56:34.688703224 +0000 UTC m=+1225.461595552" lastFinishedPulling="2025-12-02 18:56:41.495775 +0000 UTC m=+1232.268667328" observedRunningTime="2025-12-02 18:56:44.174133641 +0000 UTC m=+1234.947025969" watchObservedRunningTime="2025-12-02 18:56:44.175960928 +0000 UTC m=+1234.948853256" Dec 02 18:56:44 crc kubenswrapper[4792]: I1202 18:56:44.202468 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=35.316401997 podStartE2EDuration="42.202454955s" podCreationTimestamp="2025-12-02 18:56:02 +0000 UTC" firstStartedPulling="2025-12-02 18:56:34.609777073 +0000 UTC m=+1225.382669401" lastFinishedPulling="2025-12-02 18:56:41.495829991 +0000 UTC m=+1232.268722359" observedRunningTime="2025-12-02 18:56:44.192847896 +0000 UTC m=+1234.965740224" watchObservedRunningTime="2025-12-02 18:56:44.202454955 +0000 UTC m=+1234.975347273" Dec 02 18:56:44 crc kubenswrapper[4792]: I1202 18:56:44.281972 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-ingester-0" podStartSLOduration=24.03146683 podStartE2EDuration="31.281953935s" podCreationTimestamp="2025-12-02 18:56:13 +0000 UTC" firstStartedPulling="2025-12-02 18:56:34.63527999 +0000 UTC m=+1225.408172318" lastFinishedPulling="2025-12-02 18:56:41.885767055 +0000 UTC m=+1232.658659423" observedRunningTime="2025-12-02 18:56:44.277343005 +0000 UTC m=+1235.050235333" watchObservedRunningTime="2025-12-02 18:56:44.281953935 +0000 UTC m=+1235.054846263" Dec 02 18:56:45 crc kubenswrapper[4792]: I1202 18:56:45.074860 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"6cfe9a05-cb43-47d3-84f8-95642cd098ec","Type":"ContainerStarted","Data":"7aedbe1ca6a28f0d1c8db66cda4d9efed9bd99ae2437363dc585d5191c70e7d4"} Dec 02 18:56:45 crc kubenswrapper[4792]: I1202 18:56:45.078412 4792 generic.go:334] "Generic (PLEG): container finished" podID="d0abc322-ef0d-468b-9d23-4e2acd50b51a" containerID="b32acc841701681fc21c07b1c06bdd524607e88c74698043dd89bb7a2db1128b" exitCode=0 Dec 02 18:56:45 crc kubenswrapper[4792]: I1202 18:56:45.078465 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-gpsrm" event={"ID":"d0abc322-ef0d-468b-9d23-4e2acd50b51a","Type":"ContainerDied","Data":"b32acc841701681fc21c07b1c06bdd524607e88c74698043dd89bb7a2db1128b"} Dec 02 18:56:45 crc kubenswrapper[4792]: I1202 18:56:45.080260 4792 generic.go:334] "Generic (PLEG): container finished" podID="6f2d96d0-f671-46cd-8e95-162a0773470d" 
containerID="f1b9ba7f0083bba8aef8290a87652c513046541054a83ecdeb018a9f4b84aac1" exitCode=0 Dec 02 18:56:45 crc kubenswrapper[4792]: I1202 18:56:45.080307 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-9hhkl" event={"ID":"6f2d96d0-f671-46cd-8e95-162a0773470d","Type":"ContainerDied","Data":"f1b9ba7f0083bba8aef8290a87652c513046541054a83ecdeb018a9f4b84aac1"} Dec 02 18:56:45 crc kubenswrapper[4792]: I1202 18:56:45.083349 4792 generic.go:334] "Generic (PLEG): container finished" podID="4b100fb8-4ab3-4514-840a-98e861a2cc11" containerID="4b852c7574eaec26e72d26789d7e40f31e8788ea6a6ce2f41cfb359be6b2cf0a" exitCode=0 Dec 02 18:56:45 crc kubenswrapper[4792]: I1202 18:56:45.084004 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-fjdkj" event={"ID":"4b100fb8-4ab3-4514-840a-98e861a2cc11","Type":"ContainerDied","Data":"4b852c7574eaec26e72d26789d7e40f31e8788ea6a6ce2f41cfb359be6b2cf0a"} Dec 02 18:56:45 crc kubenswrapper[4792]: I1202 18:56:45.085829 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-querier-548665d79b-vwc9f" Dec 02 18:56:46 crc kubenswrapper[4792]: I1202 18:56:46.099198 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"db787f15-5115-48a8-9443-93f5da555d2a","Type":"ContainerStarted","Data":"c450359a2b0dea585f91cc36af93b9221e9e64dd742269b9f242467802ee9056"} Dec 02 18:56:46 crc kubenswrapper[4792]: I1202 18:56:46.101829 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-gpsrm" event={"ID":"d0abc322-ef0d-468b-9d23-4e2acd50b51a","Type":"ContainerStarted","Data":"2c8d961a7a9caaa56a345531f065589ced3b5b6896e1b1f10d8e904a50837926"} Dec 02 18:56:46 crc kubenswrapper[4792]: I1202 18:56:46.109309 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-9hhkl" event={"ID":"6f2d96d0-f671-46cd-8e95-162a0773470d","Type":"ContainerStarted","Data":"194c00decb1cdaa61c023c7bd7f2a15f6d3e6afc03290996bed646e5d9f93467"} Dec 02 18:56:46 crc kubenswrapper[4792]: I1202 18:56:46.109538 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57d769cc4f-9hhkl" Dec 02 18:56:46 crc kubenswrapper[4792]: I1202 18:56:46.113675 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"26d86aa8-79aa-4d9b-ac24-155924920219","Type":"ContainerStarted","Data":"be6312f5fb6802bcbe04e53ec7a9f6fd20ceb3e905e9e3d1c7df3bf99c248f7c"} Dec 02 18:56:46 crc kubenswrapper[4792]: I1202 18:56:46.123904 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"357feab9-6738-4c52-8478-0763a304671f","Type":"ContainerStarted","Data":"e5a2809d92c07f18aae52eb3690be10358336df409d78b614cc40481bfa486bd"} Dec 02 18:56:46 crc kubenswrapper[4792]: I1202 18:56:46.151876 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57d769cc4f-9hhkl" podStartSLOduration=-9223371987.702915 podStartE2EDuration="49.151860178s" podCreationTimestamp="2025-12-02 18:55:57 +0000 UTC" firstStartedPulling="2025-12-02 18:55:58.749385858 +0000 UTC m=+1189.522278196" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:56:46.148909422 +0000 UTC m=+1236.921801740" watchObservedRunningTime="2025-12-02 18:56:46.151860178 +0000 UTC m=+1236.924752506" Dec 02 18:56:47 crc kubenswrapper[4792]: I1202 
18:56:47.135475 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-gpsrm" event={"ID":"d0abc322-ef0d-468b-9d23-4e2acd50b51a","Type":"ContainerStarted","Data":"87ec0ea834fea3510df4ca1302e3285a45906f2a02084bf863ee3e74d5ab6e99"} Dec 02 18:56:47 crc kubenswrapper[4792]: I1202 18:56:47.135790 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-gpsrm" Dec 02 18:56:47 crc kubenswrapper[4792]: I1202 18:56:47.135803 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-gpsrm" Dec 02 18:56:47 crc kubenswrapper[4792]: I1202 18:56:47.137332 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"3d0661cf-534a-4951-9e56-7db65fdfd242","Type":"ContainerStarted","Data":"a62c2df38630c7ebad8e25ccbad956ac0e5658754400e4b860a8eb3f0cf36f97"} Dec 02 18:56:47 crc kubenswrapper[4792]: I1202 18:56:47.139875 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-fjdkj" event={"ID":"4b100fb8-4ab3-4514-840a-98e861a2cc11","Type":"ContainerStarted","Data":"89a5a1d1dc1e271b74a3be649065273e5c75f2921a128605b9f84b05b2c2d68e"} Dec 02 18:56:47 crc kubenswrapper[4792]: I1202 18:56:47.182098 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-gpsrm" podStartSLOduration=32.216524256 podStartE2EDuration="39.182064902s" podCreationTimestamp="2025-12-02 18:56:08 +0000 UTC" firstStartedPulling="2025-12-02 18:56:34.894460002 +0000 UTC m=+1225.667352330" lastFinishedPulling="2025-12-02 18:56:41.860000608 +0000 UTC m=+1232.632892976" observedRunningTime="2025-12-02 18:56:47.160840342 +0000 UTC m=+1237.933732670" watchObservedRunningTime="2025-12-02 18:56:47.182064902 +0000 UTC m=+1237.954957260" Dec 02 18:56:47 crc kubenswrapper[4792]: I1202 18:56:47.195079 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-666b6646f7-fjdkj" podStartSLOduration=5.612712877 podStartE2EDuration="50.195032338s" podCreationTimestamp="2025-12-02 18:55:57 +0000 UTC" firstStartedPulling="2025-12-02 18:55:58.56527232 +0000 UTC m=+1189.338164648" lastFinishedPulling="2025-12-02 18:56:43.147591751 +0000 UTC m=+1233.920484109" observedRunningTime="2025-12-02 18:56:47.175505032 +0000 UTC m=+1237.948397360" watchObservedRunningTime="2025-12-02 18:56:47.195032338 +0000 UTC m=+1237.967924706" Dec 02 18:56:47 crc kubenswrapper[4792]: I1202 18:56:47.942577 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-666b6646f7-fjdkj" Dec 02 18:56:52 crc kubenswrapper[4792]: I1202 18:56:52.184314 4792 generic.go:334] "Generic (PLEG): container finished" podID="26d86aa8-79aa-4d9b-ac24-155924920219" containerID="be6312f5fb6802bcbe04e53ec7a9f6fd20ceb3e905e9e3d1c7df3bf99c248f7c" exitCode=0 Dec 02 18:56:52 crc kubenswrapper[4792]: I1202 18:56:52.184388 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"26d86aa8-79aa-4d9b-ac24-155924920219","Type":"ContainerDied","Data":"be6312f5fb6802bcbe04e53ec7a9f6fd20ceb3e905e9e3d1c7df3bf99c248f7c"} Dec 02 18:56:52 crc kubenswrapper[4792]: I1202 18:56:52.186817 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba","Type":"ContainerStarted","Data":"970ed1a9e9aefd99aafb9cf861569ab2b8f1f85a7d8d627a4cba42af2fd1adf8"} Dec 02 18:56:52 crc 
kubenswrapper[4792]: I1202 18:56:52.645445 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Dec 02 18:56:52 crc kubenswrapper[4792]: I1202 18:56:52.646985 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Dec 02 18:56:52 crc kubenswrapper[4792]: I1202 18:56:52.747754 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Dec 02 18:56:52 crc kubenswrapper[4792]: I1202 18:56:52.943725 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-666b6646f7-fjdkj" Dec 02 18:56:52 crc kubenswrapper[4792]: I1202 18:56:52.943990 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Dec 02 18:56:53 crc kubenswrapper[4792]: I1202 18:56:53.201332 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-n28fq" event={"ID":"29663f5c-6fe7-42d5-8d53-c8d900e36a9c","Type":"ContainerStarted","Data":"30037972037d53af5e7af34024be7b3861e0eba010f65b16db86d8cc5a211396"} Dec 02 18:56:53 crc kubenswrapper[4792]: I1202 18:56:53.202758 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-n28fq" Dec 02 18:56:53 crc kubenswrapper[4792]: I1202 18:56:53.203840 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-57d769cc4f-9hhkl" Dec 02 18:56:53 crc kubenswrapper[4792]: I1202 18:56:53.207443 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"0c5e3683-f4d1-4f32-8c6d-ecc11415c660","Type":"ContainerStarted","Data":"992b9796a4109273d05eda401a9abed33d155dba7086aa81eca45191687649a1"} Dec 02 18:56:53 crc kubenswrapper[4792]: I1202 18:56:53.217276 4792 generic.go:334] "Generic (PLEG): container finished" podID="357feab9-6738-4c52-8478-0763a304671f" containerID="e5a2809d92c07f18aae52eb3690be10358336df409d78b614cc40481bfa486bd" exitCode=0 Dec 02 18:56:53 crc kubenswrapper[4792]: I1202 18:56:53.217381 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"357feab9-6738-4c52-8478-0763a304671f","Type":"ContainerDied","Data":"e5a2809d92c07f18aae52eb3690be10358336df409d78b614cc40481bfa486bd"} Dec 02 18:56:53 crc kubenswrapper[4792]: I1202 18:56:53.227544 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-compactor-0" event={"ID":"eab5f6f8-38fe-40ac-8407-4fc5044eba84","Type":"ContainerStarted","Data":"3d4b09cd5088c405cb22b6991548ed86ed8702a785f9ca2aad6c6ac00f518aa7"} Dec 02 18:56:53 crc kubenswrapper[4792]: I1202 18:56:53.227837 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-compactor-0" Dec 02 18:56:53 crc kubenswrapper[4792]: I1202 18:56:53.230028 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"db787f15-5115-48a8-9443-93f5da555d2a","Type":"ContainerStarted","Data":"7ec54d53295a66adb59abc3fc0c2b54c2836391c6eb4ce5c376d685cbcddcdd7"} Dec 02 18:56:53 crc kubenswrapper[4792]: I1202 18:56:53.231454 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-n28fq" podStartSLOduration=-9223371996.623343 podStartE2EDuration="40.231433733s" 
podCreationTimestamp="2025-12-02 18:56:13 +0000 UTC" firstStartedPulling="2025-12-02 18:56:34.899325035 +0000 UTC m=+1225.672217363" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:56:53.223503748 +0000 UTC m=+1243.996396076" watchObservedRunningTime="2025-12-02 18:56:53.231433733 +0000 UTC m=+1244.004326061" Dec 02 18:56:53 crc kubenswrapper[4792]: I1202 18:56:53.233024 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-lgcfh" event={"ID":"c11c6af2-cd99-41e8-b6cf-b86ab025bbfa","Type":"ContainerStarted","Data":"6094a16849beda6fb316f640e911426f2e07aaa0056330df38078c29fc547a39"} Dec 02 18:56:53 crc kubenswrapper[4792]: I1202 18:56:53.233638 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-lgcfh" Dec 02 18:56:53 crc kubenswrapper[4792]: I1202 18:56:53.241431 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Dec 02 18:56:53 crc kubenswrapper[4792]: I1202 18:56:53.241537 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Dec 02 18:56:53 crc kubenswrapper[4792]: I1202 18:56:53.356623 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-fjdkj"] Dec 02 18:56:53 crc kubenswrapper[4792]: I1202 18:56:53.356998 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-lgcfh" podStartSLOduration=-9223371996.497795 podStartE2EDuration="40.356981116s" podCreationTimestamp="2025-12-02 18:56:13 +0000 UTC" firstStartedPulling="2025-12-02 18:56:34.900869574 +0000 UTC m=+1225.673761902" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:56:53.335308165 +0000 UTC m=+1244.108200503" watchObservedRunningTime="2025-12-02 18:56:53.356981116 +0000 UTC m=+1244.129873444" Dec 02 18:56:53 crc kubenswrapper[4792]: I1202 18:56:53.357257 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-666b6646f7-fjdkj" podUID="4b100fb8-4ab3-4514-840a-98e861a2cc11" containerName="dnsmasq-dns" containerID="cri-o://89a5a1d1dc1e271b74a3be649065273e5c75f2921a128605b9f84b05b2c2d68e" gracePeriod=10 Dec 02 18:56:53 crc kubenswrapper[4792]: I1202 18:56:53.363627 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Dec 02 18:56:53 crc kubenswrapper[4792]: I1202 18:56:53.374609 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=29.856904533 podStartE2EDuration="47.374589292s" podCreationTimestamp="2025-12-02 18:56:06 +0000 UTC" firstStartedPulling="2025-12-02 18:56:34.998178862 +0000 UTC m=+1225.771071200" lastFinishedPulling="2025-12-02 18:56:52.515863621 +0000 UTC m=+1243.288755959" observedRunningTime="2025-12-02 18:56:53.365359913 +0000 UTC m=+1244.138252241" watchObservedRunningTime="2025-12-02 18:56:53.374589292 +0000 UTC m=+1244.147481620" Dec 02 18:56:53 crc kubenswrapper[4792]: I1202 18:56:53.388956 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Dec 02 18:56:53 crc kubenswrapper[4792]: I1202 18:56:53.389123 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-compactor-0" 
podStartSLOduration=-9223371996.465672 podStartE2EDuration="40.389104928s" podCreationTimestamp="2025-12-02 18:56:13 +0000 UTC" firstStartedPulling="2025-12-02 18:56:34.89676685 +0000 UTC m=+1225.669659178" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:56:53.384085348 +0000 UTC m=+1244.156977696" watchObservedRunningTime="2025-12-02 18:56:53.389104928 +0000 UTC m=+1244.161997256" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.286833 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-whpqc"] Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.294975 4792 generic.go:334] "Generic (PLEG): container finished" podID="4b100fb8-4ab3-4514-840a-98e861a2cc11" containerID="89a5a1d1dc1e271b74a3be649065273e5c75f2921a128605b9f84b05b2c2d68e" exitCode=0 Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.299606 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-fjdkj" event={"ID":"4b100fb8-4ab3-4514-840a-98e861a2cc11","Type":"ContainerDied","Data":"89a5a1d1dc1e271b74a3be649065273e5c75f2921a128605b9f84b05b2c2d68e"} Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.299724 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cb5889db5-whpqc" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.316685 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-whpqc"] Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.368981 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.427630 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rkvdr\" (UniqueName: \"kubernetes.io/projected/8c007d92-894f-4c86-bd8b-84dad70f41e5-kube-api-access-rkvdr\") pod \"dnsmasq-dns-7cb5889db5-whpqc\" (UID: \"8c007d92-894f-4c86-bd8b-84dad70f41e5\") " pod="openstack/dnsmasq-dns-7cb5889db5-whpqc" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.427754 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8c007d92-894f-4c86-bd8b-84dad70f41e5-dns-svc\") pod \"dnsmasq-dns-7cb5889db5-whpqc\" (UID: \"8c007d92-894f-4c86-bd8b-84dad70f41e5\") " pod="openstack/dnsmasq-dns-7cb5889db5-whpqc" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.432355 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c007d92-894f-4c86-bd8b-84dad70f41e5-config\") pod \"dnsmasq-dns-7cb5889db5-whpqc\" (UID: \"8c007d92-894f-4c86-bd8b-84dad70f41e5\") " pod="openstack/dnsmasq-dns-7cb5889db5-whpqc" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.535305 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rkvdr\" (UniqueName: \"kubernetes.io/projected/8c007d92-894f-4c86-bd8b-84dad70f41e5-kube-api-access-rkvdr\") pod \"dnsmasq-dns-7cb5889db5-whpqc\" (UID: \"8c007d92-894f-4c86-bd8b-84dad70f41e5\") " pod="openstack/dnsmasq-dns-7cb5889db5-whpqc" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.535400 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8c007d92-894f-4c86-bd8b-84dad70f41e5-dns-svc\") pod 
\"dnsmasq-dns-7cb5889db5-whpqc\" (UID: \"8c007d92-894f-4c86-bd8b-84dad70f41e5\") " pod="openstack/dnsmasq-dns-7cb5889db5-whpqc" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.536334 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8c007d92-894f-4c86-bd8b-84dad70f41e5-dns-svc\") pod \"dnsmasq-dns-7cb5889db5-whpqc\" (UID: \"8c007d92-894f-4c86-bd8b-84dad70f41e5\") " pod="openstack/dnsmasq-dns-7cb5889db5-whpqc" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.536419 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c007d92-894f-4c86-bd8b-84dad70f41e5-config\") pod \"dnsmasq-dns-7cb5889db5-whpqc\" (UID: \"8c007d92-894f-4c86-bd8b-84dad70f41e5\") " pod="openstack/dnsmasq-dns-7cb5889db5-whpqc" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.537427 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c007d92-894f-4c86-bd8b-84dad70f41e5-config\") pod \"dnsmasq-dns-7cb5889db5-whpqc\" (UID: \"8c007d92-894f-4c86-bd8b-84dad70f41e5\") " pod="openstack/dnsmasq-dns-7cb5889db5-whpqc" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.589141 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rkvdr\" (UniqueName: \"kubernetes.io/projected/8c007d92-894f-4c86-bd8b-84dad70f41e5-kube-api-access-rkvdr\") pod \"dnsmasq-dns-7cb5889db5-whpqc\" (UID: \"8c007d92-894f-4c86-bd8b-84dad70f41e5\") " pod="openstack/dnsmasq-dns-7cb5889db5-whpqc" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.637701 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cb5889db5-whpqc" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.664372 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-whpqc"] Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.705504 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-74f6f696b9-dlvtt"] Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.707267 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74f6f696b9-dlvtt" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.711350 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-fjdkj" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.711763 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.734290 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74f6f696b9-dlvtt"] Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.740779 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k8j8f\" (UniqueName: \"kubernetes.io/projected/4b100fb8-4ab3-4514-840a-98e861a2cc11-kube-api-access-k8j8f\") pod \"4b100fb8-4ab3-4514-840a-98e861a2cc11\" (UID: \"4b100fb8-4ab3-4514-840a-98e861a2cc11\") " Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.740985 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4b100fb8-4ab3-4514-840a-98e861a2cc11-dns-svc\") pod \"4b100fb8-4ab3-4514-840a-98e861a2cc11\" (UID: \"4b100fb8-4ab3-4514-840a-98e861a2cc11\") " Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.741015 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b100fb8-4ab3-4514-840a-98e861a2cc11-config\") pod \"4b100fb8-4ab3-4514-840a-98e861a2cc11\" (UID: \"4b100fb8-4ab3-4514-840a-98e861a2cc11\") " Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.741347 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/79ae89c2-1470-4caa-ad26-718be5c3ab1f-config\") pod \"dnsmasq-dns-74f6f696b9-dlvtt\" (UID: \"79ae89c2-1470-4caa-ad26-718be5c3ab1f\") " pod="openstack/dnsmasq-dns-74f6f696b9-dlvtt" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.741374 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5hhs8\" (UniqueName: \"kubernetes.io/projected/79ae89c2-1470-4caa-ad26-718be5c3ab1f-kube-api-access-5hhs8\") pod \"dnsmasq-dns-74f6f696b9-dlvtt\" (UID: \"79ae89c2-1470-4caa-ad26-718be5c3ab1f\") " pod="openstack/dnsmasq-dns-74f6f696b9-dlvtt" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.741392 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/79ae89c2-1470-4caa-ad26-718be5c3ab1f-ovsdbserver-nb\") pod \"dnsmasq-dns-74f6f696b9-dlvtt\" (UID: \"79ae89c2-1470-4caa-ad26-718be5c3ab1f\") " pod="openstack/dnsmasq-dns-74f6f696b9-dlvtt" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.741456 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/79ae89c2-1470-4caa-ad26-718be5c3ab1f-dns-svc\") pod \"dnsmasq-dns-74f6f696b9-dlvtt\" (UID: \"79ae89c2-1470-4caa-ad26-718be5c3ab1f\") " pod="openstack/dnsmasq-dns-74f6f696b9-dlvtt" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.746019 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b100fb8-4ab3-4514-840a-98e861a2cc11-kube-api-access-k8j8f" (OuterVolumeSpecName: "kube-api-access-k8j8f") pod "4b100fb8-4ab3-4514-840a-98e861a2cc11" (UID: "4b100fb8-4ab3-4514-840a-98e861a2cc11"). InnerVolumeSpecName "kube-api-access-k8j8f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.818424 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-nvx6l"] Dec 02 18:56:54 crc kubenswrapper[4792]: E1202 18:56:54.818803 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b100fb8-4ab3-4514-840a-98e861a2cc11" containerName="dnsmasq-dns" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.818819 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b100fb8-4ab3-4514-840a-98e861a2cc11" containerName="dnsmasq-dns" Dec 02 18:56:54 crc kubenswrapper[4792]: E1202 18:56:54.818837 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b100fb8-4ab3-4514-840a-98e861a2cc11" containerName="init" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.818843 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b100fb8-4ab3-4514-840a-98e861a2cc11" containerName="init" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.819113 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b100fb8-4ab3-4514-840a-98e861a2cc11" containerName="dnsmasq-dns" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.819842 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-nvx6l" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.822636 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.829653 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4b100fb8-4ab3-4514-840a-98e861a2cc11-config" (OuterVolumeSpecName: "config") pod "4b100fb8-4ab3-4514-840a-98e861a2cc11" (UID: "4b100fb8-4ab3-4514-840a-98e861a2cc11"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.841949 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/0a44f4be-0f5d-45dc-9cb0-b4705d150c1a-ovs-rundir\") pod \"ovn-controller-metrics-nvx6l\" (UID: \"0a44f4be-0f5d-45dc-9cb0-b4705d150c1a\") " pod="openstack/ovn-controller-metrics-nvx6l" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.841996 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0a44f4be-0f5d-45dc-9cb0-b4705d150c1a-config\") pod \"ovn-controller-metrics-nvx6l\" (UID: \"0a44f4be-0f5d-45dc-9cb0-b4705d150c1a\") " pod="openstack/ovn-controller-metrics-nvx6l" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.842015 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/0a44f4be-0f5d-45dc-9cb0-b4705d150c1a-ovn-rundir\") pod \"ovn-controller-metrics-nvx6l\" (UID: \"0a44f4be-0f5d-45dc-9cb0-b4705d150c1a\") " pod="openstack/ovn-controller-metrics-nvx6l" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.842053 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/79ae89c2-1470-4caa-ad26-718be5c3ab1f-dns-svc\") pod \"dnsmasq-dns-74f6f696b9-dlvtt\" (UID: \"79ae89c2-1470-4caa-ad26-718be5c3ab1f\") " pod="openstack/dnsmasq-dns-74f6f696b9-dlvtt" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.842081 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a44f4be-0f5d-45dc-9cb0-b4705d150c1a-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-nvx6l\" (UID: \"0a44f4be-0f5d-45dc-9cb0-b4705d150c1a\") " pod="openstack/ovn-controller-metrics-nvx6l" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.842138 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8zcrp\" (UniqueName: \"kubernetes.io/projected/0a44f4be-0f5d-45dc-9cb0-b4705d150c1a-kube-api-access-8zcrp\") pod \"ovn-controller-metrics-nvx6l\" (UID: \"0a44f4be-0f5d-45dc-9cb0-b4705d150c1a\") " pod="openstack/ovn-controller-metrics-nvx6l" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.842166 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/79ae89c2-1470-4caa-ad26-718be5c3ab1f-config\") pod \"dnsmasq-dns-74f6f696b9-dlvtt\" (UID: \"79ae89c2-1470-4caa-ad26-718be5c3ab1f\") " pod="openstack/dnsmasq-dns-74f6f696b9-dlvtt" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.842906 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/79ae89c2-1470-4caa-ad26-718be5c3ab1f-dns-svc\") pod \"dnsmasq-dns-74f6f696b9-dlvtt\" (UID: \"79ae89c2-1470-4caa-ad26-718be5c3ab1f\") " pod="openstack/dnsmasq-dns-74f6f696b9-dlvtt" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.842976 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/79ae89c2-1470-4caa-ad26-718be5c3ab1f-config\") pod \"dnsmasq-dns-74f6f696b9-dlvtt\" (UID: \"79ae89c2-1470-4caa-ad26-718be5c3ab1f\") " 
pod="openstack/dnsmasq-dns-74f6f696b9-dlvtt" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.843021 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5hhs8\" (UniqueName: \"kubernetes.io/projected/79ae89c2-1470-4caa-ad26-718be5c3ab1f-kube-api-access-5hhs8\") pod \"dnsmasq-dns-74f6f696b9-dlvtt\" (UID: \"79ae89c2-1470-4caa-ad26-718be5c3ab1f\") " pod="openstack/dnsmasq-dns-74f6f696b9-dlvtt" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.843052 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a44f4be-0f5d-45dc-9cb0-b4705d150c1a-combined-ca-bundle\") pod \"ovn-controller-metrics-nvx6l\" (UID: \"0a44f4be-0f5d-45dc-9cb0-b4705d150c1a\") " pod="openstack/ovn-controller-metrics-nvx6l" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.843076 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/79ae89c2-1470-4caa-ad26-718be5c3ab1f-ovsdbserver-nb\") pod \"dnsmasq-dns-74f6f696b9-dlvtt\" (UID: \"79ae89c2-1470-4caa-ad26-718be5c3ab1f\") " pod="openstack/dnsmasq-dns-74f6f696b9-dlvtt" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.843197 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b100fb8-4ab3-4514-840a-98e861a2cc11-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.843216 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k8j8f\" (UniqueName: \"kubernetes.io/projected/4b100fb8-4ab3-4514-840a-98e861a2cc11-kube-api-access-k8j8f\") on node \"crc\" DevicePath \"\"" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.843782 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/79ae89c2-1470-4caa-ad26-718be5c3ab1f-ovsdbserver-nb\") pod \"dnsmasq-dns-74f6f696b9-dlvtt\" (UID: \"79ae89c2-1470-4caa-ad26-718be5c3ab1f\") " pod="openstack/dnsmasq-dns-74f6f696b9-dlvtt" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.862914 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-nvx6l"] Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.873594 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5hhs8\" (UniqueName: \"kubernetes.io/projected/79ae89c2-1470-4caa-ad26-718be5c3ab1f-kube-api-access-5hhs8\") pod \"dnsmasq-dns-74f6f696b9-dlvtt\" (UID: \"79ae89c2-1470-4caa-ad26-718be5c3ab1f\") " pod="openstack/dnsmasq-dns-74f6f696b9-dlvtt" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.898895 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4b100fb8-4ab3-4514-840a-98e861a2cc11-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "4b100fb8-4ab3-4514-840a-98e861a2cc11" (UID: "4b100fb8-4ab3-4514-840a-98e861a2cc11"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.943967 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8zcrp\" (UniqueName: \"kubernetes.io/projected/0a44f4be-0f5d-45dc-9cb0-b4705d150c1a-kube-api-access-8zcrp\") pod \"ovn-controller-metrics-nvx6l\" (UID: \"0a44f4be-0f5d-45dc-9cb0-b4705d150c1a\") " pod="openstack/ovn-controller-metrics-nvx6l" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.944024 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a44f4be-0f5d-45dc-9cb0-b4705d150c1a-combined-ca-bundle\") pod \"ovn-controller-metrics-nvx6l\" (UID: \"0a44f4be-0f5d-45dc-9cb0-b4705d150c1a\") " pod="openstack/ovn-controller-metrics-nvx6l" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.944051 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/0a44f4be-0f5d-45dc-9cb0-b4705d150c1a-ovs-rundir\") pod \"ovn-controller-metrics-nvx6l\" (UID: \"0a44f4be-0f5d-45dc-9cb0-b4705d150c1a\") " pod="openstack/ovn-controller-metrics-nvx6l" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.944073 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0a44f4be-0f5d-45dc-9cb0-b4705d150c1a-config\") pod \"ovn-controller-metrics-nvx6l\" (UID: \"0a44f4be-0f5d-45dc-9cb0-b4705d150c1a\") " pod="openstack/ovn-controller-metrics-nvx6l" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.944095 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/0a44f4be-0f5d-45dc-9cb0-b4705d150c1a-ovn-rundir\") pod \"ovn-controller-metrics-nvx6l\" (UID: \"0a44f4be-0f5d-45dc-9cb0-b4705d150c1a\") " pod="openstack/ovn-controller-metrics-nvx6l" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.944140 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a44f4be-0f5d-45dc-9cb0-b4705d150c1a-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-nvx6l\" (UID: \"0a44f4be-0f5d-45dc-9cb0-b4705d150c1a\") " pod="openstack/ovn-controller-metrics-nvx6l" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.944198 4792 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4b100fb8-4ab3-4514-840a-98e861a2cc11-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.944476 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/0a44f4be-0f5d-45dc-9cb0-b4705d150c1a-ovs-rundir\") pod \"ovn-controller-metrics-nvx6l\" (UID: \"0a44f4be-0f5d-45dc-9cb0-b4705d150c1a\") " pod="openstack/ovn-controller-metrics-nvx6l" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.945287 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0a44f4be-0f5d-45dc-9cb0-b4705d150c1a-config\") pod \"ovn-controller-metrics-nvx6l\" (UID: \"0a44f4be-0f5d-45dc-9cb0-b4705d150c1a\") " pod="openstack/ovn-controller-metrics-nvx6l" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.945616 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: 
\"kubernetes.io/host-path/0a44f4be-0f5d-45dc-9cb0-b4705d150c1a-ovn-rundir\") pod \"ovn-controller-metrics-nvx6l\" (UID: \"0a44f4be-0f5d-45dc-9cb0-b4705d150c1a\") " pod="openstack/ovn-controller-metrics-nvx6l" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.949431 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a44f4be-0f5d-45dc-9cb0-b4705d150c1a-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-nvx6l\" (UID: \"0a44f4be-0f5d-45dc-9cb0-b4705d150c1a\") " pod="openstack/ovn-controller-metrics-nvx6l" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.950540 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a44f4be-0f5d-45dc-9cb0-b4705d150c1a-combined-ca-bundle\") pod \"ovn-controller-metrics-nvx6l\" (UID: \"0a44f4be-0f5d-45dc-9cb0-b4705d150c1a\") " pod="openstack/ovn-controller-metrics-nvx6l" Dec 02 18:56:54 crc kubenswrapper[4792]: I1202 18:56:54.962111 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8zcrp\" (UniqueName: \"kubernetes.io/projected/0a44f4be-0f5d-45dc-9cb0-b4705d150c1a-kube-api-access-8zcrp\") pod \"ovn-controller-metrics-nvx6l\" (UID: \"0a44f4be-0f5d-45dc-9cb0-b4705d150c1a\") " pod="openstack/ovn-controller-metrics-nvx6l" Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.028445 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74f6f696b9-dlvtt" Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.091670 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74f6f696b9-dlvtt"] Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.139095 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-698758b865-n56s7"] Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.140453 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-n56s7" Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.144482 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.149498 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wtpjh\" (UniqueName: \"kubernetes.io/projected/cb42b8ac-ce52-46dc-8742-339d3ea9ab03-kube-api-access-wtpjh\") pod \"dnsmasq-dns-698758b865-n56s7\" (UID: \"cb42b8ac-ce52-46dc-8742-339d3ea9ab03\") " pod="openstack/dnsmasq-dns-698758b865-n56s7" Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.149561 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cb42b8ac-ce52-46dc-8742-339d3ea9ab03-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-n56s7\" (UID: \"cb42b8ac-ce52-46dc-8742-339d3ea9ab03\") " pod="openstack/dnsmasq-dns-698758b865-n56s7" Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.149614 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb42b8ac-ce52-46dc-8742-339d3ea9ab03-config\") pod \"dnsmasq-dns-698758b865-n56s7\" (UID: \"cb42b8ac-ce52-46dc-8742-339d3ea9ab03\") " pod="openstack/dnsmasq-dns-698758b865-n56s7" Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.149787 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cb42b8ac-ce52-46dc-8742-339d3ea9ab03-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-n56s7\" (UID: \"cb42b8ac-ce52-46dc-8742-339d3ea9ab03\") " pod="openstack/dnsmasq-dns-698758b865-n56s7" Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.149884 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cb42b8ac-ce52-46dc-8742-339d3ea9ab03-dns-svc\") pod \"dnsmasq-dns-698758b865-n56s7\" (UID: \"cb42b8ac-ce52-46dc-8742-339d3ea9ab03\") " pod="openstack/dnsmasq-dns-698758b865-n56s7" Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.163379 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-n56s7"] Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.212436 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-nvx6l" Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.251257 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb42b8ac-ce52-46dc-8742-339d3ea9ab03-config\") pod \"dnsmasq-dns-698758b865-n56s7\" (UID: \"cb42b8ac-ce52-46dc-8742-339d3ea9ab03\") " pod="openstack/dnsmasq-dns-698758b865-n56s7" Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.251331 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cb42b8ac-ce52-46dc-8742-339d3ea9ab03-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-n56s7\" (UID: \"cb42b8ac-ce52-46dc-8742-339d3ea9ab03\") " pod="openstack/dnsmasq-dns-698758b865-n56s7" Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.251370 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cb42b8ac-ce52-46dc-8742-339d3ea9ab03-dns-svc\") pod \"dnsmasq-dns-698758b865-n56s7\" (UID: \"cb42b8ac-ce52-46dc-8742-339d3ea9ab03\") " pod="openstack/dnsmasq-dns-698758b865-n56s7" Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.251427 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wtpjh\" (UniqueName: \"kubernetes.io/projected/cb42b8ac-ce52-46dc-8742-339d3ea9ab03-kube-api-access-wtpjh\") pod \"dnsmasq-dns-698758b865-n56s7\" (UID: \"cb42b8ac-ce52-46dc-8742-339d3ea9ab03\") " pod="openstack/dnsmasq-dns-698758b865-n56s7" Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.251451 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cb42b8ac-ce52-46dc-8742-339d3ea9ab03-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-n56s7\" (UID: \"cb42b8ac-ce52-46dc-8742-339d3ea9ab03\") " pod="openstack/dnsmasq-dns-698758b865-n56s7" Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.252183 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb42b8ac-ce52-46dc-8742-339d3ea9ab03-config\") pod \"dnsmasq-dns-698758b865-n56s7\" (UID: \"cb42b8ac-ce52-46dc-8742-339d3ea9ab03\") " pod="openstack/dnsmasq-dns-698758b865-n56s7" Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.256150 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cb42b8ac-ce52-46dc-8742-339d3ea9ab03-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-n56s7\" (UID: \"cb42b8ac-ce52-46dc-8742-339d3ea9ab03\") " pod="openstack/dnsmasq-dns-698758b865-n56s7" Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.258179 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cb42b8ac-ce52-46dc-8742-339d3ea9ab03-dns-svc\") pod \"dnsmasq-dns-698758b865-n56s7\" (UID: \"cb42b8ac-ce52-46dc-8742-339d3ea9ab03\") " pod="openstack/dnsmasq-dns-698758b865-n56s7" Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.258380 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cb42b8ac-ce52-46dc-8742-339d3ea9ab03-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-n56s7\" (UID: \"cb42b8ac-ce52-46dc-8742-339d3ea9ab03\") " pod="openstack/dnsmasq-dns-698758b865-n56s7" Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 
18:56:55.275860 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wtpjh\" (UniqueName: \"kubernetes.io/projected/cb42b8ac-ce52-46dc-8742-339d3ea9ab03-kube-api-access-wtpjh\") pod \"dnsmasq-dns-698758b865-n56s7\" (UID: \"cb42b8ac-ce52-46dc-8742-339d3ea9ab03\") " pod="openstack/dnsmasq-dns-698758b865-n56s7" Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.311162 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"6cfe9a05-cb43-47d3-84f8-95642cd098ec","Type":"ContainerStarted","Data":"a05d1114d73b7c41a2515beb7727318c9b1ea3af9eac5ed22f028235e2cbbe5e"} Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.320455 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-fjdkj" Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.321601 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-fjdkj" event={"ID":"4b100fb8-4ab3-4514-840a-98e861a2cc11","Type":"ContainerDied","Data":"96880bc2e2b80026631a59a9b1ddf9942168319b99b264fd616fc409c8e10892"} Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.321665 4792 scope.go:117] "RemoveContainer" containerID="89a5a1d1dc1e271b74a3be649065273e5c75f2921a128605b9f84b05b2c2d68e" Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.363667 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=27.612350586 podStartE2EDuration="44.363646472s" podCreationTimestamp="2025-12-02 18:56:11 +0000 UTC" firstStartedPulling="2025-12-02 18:56:35.780067357 +0000 UTC m=+1226.552959685" lastFinishedPulling="2025-12-02 18:56:52.531363253 +0000 UTC m=+1243.304255571" observedRunningTime="2025-12-02 18:56:55.342582276 +0000 UTC m=+1246.115474604" watchObservedRunningTime="2025-12-02 18:56:55.363646472 +0000 UTC m=+1246.136538800" Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.379839 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-fjdkj"] Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.388246 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-fjdkj"] Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.459495 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-n56s7" Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.493190 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.498927 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.502891 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.503066 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.503201 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-22z8p" Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.503337 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.512707 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.550065 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b100fb8-4ab3-4514-840a-98e861a2cc11" path="/var/lib/kubelet/pods/4b100fb8-4ab3-4514-840a-98e861a2cc11/volumes" Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.563439 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/252fab2e-fcb7-43e8-940a-48adc8f4ebd5-cache\") pod \"swift-storage-0\" (UID: \"252fab2e-fcb7-43e8-940a-48adc8f4ebd5\") " pod="openstack/swift-storage-0" Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.563851 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cplrl\" (UniqueName: \"kubernetes.io/projected/252fab2e-fcb7-43e8-940a-48adc8f4ebd5-kube-api-access-cplrl\") pod \"swift-storage-0\" (UID: \"252fab2e-fcb7-43e8-940a-48adc8f4ebd5\") " pod="openstack/swift-storage-0" Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.563908 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-011b1854-7800-4c56-bc6d-e34529bd6e28\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-011b1854-7800-4c56-bc6d-e34529bd6e28\") pod \"swift-storage-0\" (UID: \"252fab2e-fcb7-43e8-940a-48adc8f4ebd5\") " pod="openstack/swift-storage-0" Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.563963 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/252fab2e-fcb7-43e8-940a-48adc8f4ebd5-etc-swift\") pod \"swift-storage-0\" (UID: \"252fab2e-fcb7-43e8-940a-48adc8f4ebd5\") " pod="openstack/swift-storage-0" Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.563991 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/252fab2e-fcb7-43e8-940a-48adc8f4ebd5-lock\") pod \"swift-storage-0\" (UID: \"252fab2e-fcb7-43e8-940a-48adc8f4ebd5\") " pod="openstack/swift-storage-0" Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.665516 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/252fab2e-fcb7-43e8-940a-48adc8f4ebd5-cache\") pod \"swift-storage-0\" (UID: \"252fab2e-fcb7-43e8-940a-48adc8f4ebd5\") " pod="openstack/swift-storage-0" Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.665582 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cplrl\" 
(UniqueName: \"kubernetes.io/projected/252fab2e-fcb7-43e8-940a-48adc8f4ebd5-kube-api-access-cplrl\") pod \"swift-storage-0\" (UID: \"252fab2e-fcb7-43e8-940a-48adc8f4ebd5\") " pod="openstack/swift-storage-0" Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.665632 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-011b1854-7800-4c56-bc6d-e34529bd6e28\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-011b1854-7800-4c56-bc6d-e34529bd6e28\") pod \"swift-storage-0\" (UID: \"252fab2e-fcb7-43e8-940a-48adc8f4ebd5\") " pod="openstack/swift-storage-0" Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.665680 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/252fab2e-fcb7-43e8-940a-48adc8f4ebd5-etc-swift\") pod \"swift-storage-0\" (UID: \"252fab2e-fcb7-43e8-940a-48adc8f4ebd5\") " pod="openstack/swift-storage-0" Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.665707 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/252fab2e-fcb7-43e8-940a-48adc8f4ebd5-lock\") pod \"swift-storage-0\" (UID: \"252fab2e-fcb7-43e8-940a-48adc8f4ebd5\") " pod="openstack/swift-storage-0" Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.666136 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/252fab2e-fcb7-43e8-940a-48adc8f4ebd5-lock\") pod \"swift-storage-0\" (UID: \"252fab2e-fcb7-43e8-940a-48adc8f4ebd5\") " pod="openstack/swift-storage-0" Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.666354 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/252fab2e-fcb7-43e8-940a-48adc8f4ebd5-cache\") pod \"swift-storage-0\" (UID: \"252fab2e-fcb7-43e8-940a-48adc8f4ebd5\") " pod="openstack/swift-storage-0" Dec 02 18:56:55 crc kubenswrapper[4792]: E1202 18:56:55.666456 4792 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 02 18:56:55 crc kubenswrapper[4792]: E1202 18:56:55.666475 4792 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 02 18:56:55 crc kubenswrapper[4792]: E1202 18:56:55.666536 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/252fab2e-fcb7-43e8-940a-48adc8f4ebd5-etc-swift podName:252fab2e-fcb7-43e8-940a-48adc8f4ebd5 nodeName:}" failed. No retries permitted until 2025-12-02 18:56:56.16650581 +0000 UTC m=+1246.939398128 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/252fab2e-fcb7-43e8-940a-48adc8f4ebd5-etc-swift") pod "swift-storage-0" (UID: "252fab2e-fcb7-43e8-940a-48adc8f4ebd5") : configmap "swift-ring-files" not found Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.670281 4792 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.670310 4792 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-011b1854-7800-4c56-bc6d-e34529bd6e28\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-011b1854-7800-4c56-bc6d-e34529bd6e28\") pod \"swift-storage-0\" (UID: \"252fab2e-fcb7-43e8-940a-48adc8f4ebd5\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/535947c8656083f1455bb30f51bf83b8f433cb50e5d6d73e12afc5b7940864a5/globalmount\"" pod="openstack/swift-storage-0" Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.687681 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cplrl\" (UniqueName: \"kubernetes.io/projected/252fab2e-fcb7-43e8-940a-48adc8f4ebd5-kube-api-access-cplrl\") pod \"swift-storage-0\" (UID: \"252fab2e-fcb7-43e8-940a-48adc8f4ebd5\") " pod="openstack/swift-storage-0" Dec 02 18:56:55 crc kubenswrapper[4792]: I1202 18:56:55.701058 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-011b1854-7800-4c56-bc6d-e34529bd6e28\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-011b1854-7800-4c56-bc6d-e34529bd6e28\") pod \"swift-storage-0\" (UID: \"252fab2e-fcb7-43e8-940a-48adc8f4ebd5\") " pod="openstack/swift-storage-0" Dec 02 18:56:56 crc kubenswrapper[4792]: I1202 18:56:56.037255 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-jd92w"] Dec 02 18:56:56 crc kubenswrapper[4792]: I1202 18:56:56.039030 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-jd92w" Dec 02 18:56:56 crc kubenswrapper[4792]: I1202 18:56:56.042969 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Dec 02 18:56:56 crc kubenswrapper[4792]: I1202 18:56:56.047038 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 02 18:56:56 crc kubenswrapper[4792]: I1202 18:56:56.047422 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Dec 02 18:56:56 crc kubenswrapper[4792]: I1202 18:56:56.054225 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-jd92w"] Dec 02 18:56:56 crc kubenswrapper[4792]: I1202 18:56:56.176375 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/865e7f48-168c-4b42-a6a0-308250071747-scripts\") pod \"swift-ring-rebalance-jd92w\" (UID: \"865e7f48-168c-4b42-a6a0-308250071747\") " pod="openstack/swift-ring-rebalance-jd92w" Dec 02 18:56:56 crc kubenswrapper[4792]: I1202 18:56:56.176439 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/865e7f48-168c-4b42-a6a0-308250071747-swiftconf\") pod \"swift-ring-rebalance-jd92w\" (UID: \"865e7f48-168c-4b42-a6a0-308250071747\") " pod="openstack/swift-ring-rebalance-jd92w" Dec 02 18:56:56 crc kubenswrapper[4792]: I1202 18:56:56.176697 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/865e7f48-168c-4b42-a6a0-308250071747-etc-swift\") pod \"swift-ring-rebalance-jd92w\" (UID: \"865e7f48-168c-4b42-a6a0-308250071747\") " pod="openstack/swift-ring-rebalance-jd92w" Dec 02 18:56:56 crc 
kubenswrapper[4792]: I1202 18:56:56.176742 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/865e7f48-168c-4b42-a6a0-308250071747-ring-data-devices\") pod \"swift-ring-rebalance-jd92w\" (UID: \"865e7f48-168c-4b42-a6a0-308250071747\") " pod="openstack/swift-ring-rebalance-jd92w" Dec 02 18:56:56 crc kubenswrapper[4792]: I1202 18:56:56.176780 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/865e7f48-168c-4b42-a6a0-308250071747-combined-ca-bundle\") pod \"swift-ring-rebalance-jd92w\" (UID: \"865e7f48-168c-4b42-a6a0-308250071747\") " pod="openstack/swift-ring-rebalance-jd92w" Dec 02 18:56:56 crc kubenswrapper[4792]: I1202 18:56:56.177018 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/252fab2e-fcb7-43e8-940a-48adc8f4ebd5-etc-swift\") pod \"swift-storage-0\" (UID: \"252fab2e-fcb7-43e8-940a-48adc8f4ebd5\") " pod="openstack/swift-storage-0" Dec 02 18:56:56 crc kubenswrapper[4792]: I1202 18:56:56.177196 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/865e7f48-168c-4b42-a6a0-308250071747-dispersionconf\") pod \"swift-ring-rebalance-jd92w\" (UID: \"865e7f48-168c-4b42-a6a0-308250071747\") " pod="openstack/swift-ring-rebalance-jd92w" Dec 02 18:56:56 crc kubenswrapper[4792]: I1202 18:56:56.177271 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zf8nj\" (UniqueName: \"kubernetes.io/projected/865e7f48-168c-4b42-a6a0-308250071747-kube-api-access-zf8nj\") pod \"swift-ring-rebalance-jd92w\" (UID: \"865e7f48-168c-4b42-a6a0-308250071747\") " pod="openstack/swift-ring-rebalance-jd92w" Dec 02 18:56:56 crc kubenswrapper[4792]: E1202 18:56:56.177373 4792 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 02 18:56:56 crc kubenswrapper[4792]: E1202 18:56:56.177402 4792 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 02 18:56:56 crc kubenswrapper[4792]: E1202 18:56:56.177484 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/252fab2e-fcb7-43e8-940a-48adc8f4ebd5-etc-swift podName:252fab2e-fcb7-43e8-940a-48adc8f4ebd5 nodeName:}" failed. No retries permitted until 2025-12-02 18:56:57.17746264 +0000 UTC m=+1247.950354968 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/252fab2e-fcb7-43e8-940a-48adc8f4ebd5-etc-swift") pod "swift-storage-0" (UID: "252fab2e-fcb7-43e8-940a-48adc8f4ebd5") : configmap "swift-ring-files" not found Dec 02 18:56:56 crc kubenswrapper[4792]: I1202 18:56:56.280261 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/865e7f48-168c-4b42-a6a0-308250071747-ring-data-devices\") pod \"swift-ring-rebalance-jd92w\" (UID: \"865e7f48-168c-4b42-a6a0-308250071747\") " pod="openstack/swift-ring-rebalance-jd92w" Dec 02 18:56:56 crc kubenswrapper[4792]: I1202 18:56:56.280326 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/865e7f48-168c-4b42-a6a0-308250071747-etc-swift\") pod \"swift-ring-rebalance-jd92w\" (UID: \"865e7f48-168c-4b42-a6a0-308250071747\") " pod="openstack/swift-ring-rebalance-jd92w" Dec 02 18:56:56 crc kubenswrapper[4792]: I1202 18:56:56.280351 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/865e7f48-168c-4b42-a6a0-308250071747-combined-ca-bundle\") pod \"swift-ring-rebalance-jd92w\" (UID: \"865e7f48-168c-4b42-a6a0-308250071747\") " pod="openstack/swift-ring-rebalance-jd92w" Dec 02 18:56:56 crc kubenswrapper[4792]: I1202 18:56:56.280471 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/865e7f48-168c-4b42-a6a0-308250071747-dispersionconf\") pod \"swift-ring-rebalance-jd92w\" (UID: \"865e7f48-168c-4b42-a6a0-308250071747\") " pod="openstack/swift-ring-rebalance-jd92w" Dec 02 18:56:56 crc kubenswrapper[4792]: I1202 18:56:56.280549 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zf8nj\" (UniqueName: \"kubernetes.io/projected/865e7f48-168c-4b42-a6a0-308250071747-kube-api-access-zf8nj\") pod \"swift-ring-rebalance-jd92w\" (UID: \"865e7f48-168c-4b42-a6a0-308250071747\") " pod="openstack/swift-ring-rebalance-jd92w" Dec 02 18:56:56 crc kubenswrapper[4792]: I1202 18:56:56.280599 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/865e7f48-168c-4b42-a6a0-308250071747-scripts\") pod \"swift-ring-rebalance-jd92w\" (UID: \"865e7f48-168c-4b42-a6a0-308250071747\") " pod="openstack/swift-ring-rebalance-jd92w" Dec 02 18:56:56 crc kubenswrapper[4792]: I1202 18:56:56.280623 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/865e7f48-168c-4b42-a6a0-308250071747-swiftconf\") pod \"swift-ring-rebalance-jd92w\" (UID: \"865e7f48-168c-4b42-a6a0-308250071747\") " pod="openstack/swift-ring-rebalance-jd92w" Dec 02 18:56:56 crc kubenswrapper[4792]: I1202 18:56:56.281877 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/865e7f48-168c-4b42-a6a0-308250071747-ring-data-devices\") pod \"swift-ring-rebalance-jd92w\" (UID: \"865e7f48-168c-4b42-a6a0-308250071747\") " pod="openstack/swift-ring-rebalance-jd92w" Dec 02 18:56:56 crc kubenswrapper[4792]: I1202 18:56:56.283752 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/865e7f48-168c-4b42-a6a0-308250071747-scripts\") pod 
\"swift-ring-rebalance-jd92w\" (UID: \"865e7f48-168c-4b42-a6a0-308250071747\") " pod="openstack/swift-ring-rebalance-jd92w" Dec 02 18:56:56 crc kubenswrapper[4792]: I1202 18:56:56.283829 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/865e7f48-168c-4b42-a6a0-308250071747-etc-swift\") pod \"swift-ring-rebalance-jd92w\" (UID: \"865e7f48-168c-4b42-a6a0-308250071747\") " pod="openstack/swift-ring-rebalance-jd92w" Dec 02 18:56:56 crc kubenswrapper[4792]: I1202 18:56:56.284021 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/865e7f48-168c-4b42-a6a0-308250071747-dispersionconf\") pod \"swift-ring-rebalance-jd92w\" (UID: \"865e7f48-168c-4b42-a6a0-308250071747\") " pod="openstack/swift-ring-rebalance-jd92w" Dec 02 18:56:56 crc kubenswrapper[4792]: I1202 18:56:56.290016 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/865e7f48-168c-4b42-a6a0-308250071747-swiftconf\") pod \"swift-ring-rebalance-jd92w\" (UID: \"865e7f48-168c-4b42-a6a0-308250071747\") " pod="openstack/swift-ring-rebalance-jd92w" Dec 02 18:56:56 crc kubenswrapper[4792]: I1202 18:56:56.301665 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/865e7f48-168c-4b42-a6a0-308250071747-combined-ca-bundle\") pod \"swift-ring-rebalance-jd92w\" (UID: \"865e7f48-168c-4b42-a6a0-308250071747\") " pod="openstack/swift-ring-rebalance-jd92w" Dec 02 18:56:56 crc kubenswrapper[4792]: I1202 18:56:56.304233 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zf8nj\" (UniqueName: \"kubernetes.io/projected/865e7f48-168c-4b42-a6a0-308250071747-kube-api-access-zf8nj\") pod \"swift-ring-rebalance-jd92w\" (UID: \"865e7f48-168c-4b42-a6a0-308250071747\") " pod="openstack/swift-ring-rebalance-jd92w" Dec 02 18:56:56 crc kubenswrapper[4792]: I1202 18:56:56.359263 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-jd92w" Dec 02 18:56:56 crc kubenswrapper[4792]: I1202 18:56:56.900252 4792 scope.go:117] "RemoveContainer" containerID="4b852c7574eaec26e72d26789d7e40f31e8788ea6a6ce2f41cfb359be6b2cf0a" Dec 02 18:56:57 crc kubenswrapper[4792]: I1202 18:56:57.211399 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/252fab2e-fcb7-43e8-940a-48adc8f4ebd5-etc-swift\") pod \"swift-storage-0\" (UID: \"252fab2e-fcb7-43e8-940a-48adc8f4ebd5\") " pod="openstack/swift-storage-0" Dec 02 18:56:57 crc kubenswrapper[4792]: E1202 18:56:57.211706 4792 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 02 18:56:57 crc kubenswrapper[4792]: E1202 18:56:57.212656 4792 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 02 18:56:57 crc kubenswrapper[4792]: E1202 18:56:57.212900 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/252fab2e-fcb7-43e8-940a-48adc8f4ebd5-etc-swift podName:252fab2e-fcb7-43e8-940a-48adc8f4ebd5 nodeName:}" failed. No retries permitted until 2025-12-02 18:56:59.212712884 +0000 UTC m=+1249.985605212 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/252fab2e-fcb7-43e8-940a-48adc8f4ebd5-etc-swift") pod "swift-storage-0" (UID: "252fab2e-fcb7-43e8-940a-48adc8f4ebd5") : configmap "swift-ring-files" not found Dec 02 18:56:57 crc kubenswrapper[4792]: I1202 18:56:57.526649 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-n56s7"] Dec 02 18:56:57 crc kubenswrapper[4792]: I1202 18:56:57.748513 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-nvx6l"] Dec 02 18:56:57 crc kubenswrapper[4792]: I1202 18:56:57.756591 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-jd92w"] Dec 02 18:56:57 crc kubenswrapper[4792]: I1202 18:56:57.765252 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74f6f696b9-dlvtt"] Dec 02 18:56:57 crc kubenswrapper[4792]: I1202 18:56:57.833935 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-whpqc"] Dec 02 18:56:57 crc kubenswrapper[4792]: W1202 18:56:57.948907 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0a44f4be_0f5d_45dc_9cb0_b4705d150c1a.slice/crio-414b82876d3510b176c8bb5465e61793b8aac1c9142382ea96540e15ec26f96e WatchSource:0}: Error finding container 414b82876d3510b176c8bb5465e61793b8aac1c9142382ea96540e15ec26f96e: Status 404 returned error can't find the container with id 414b82876d3510b176c8bb5465e61793b8aac1c9142382ea96540e15ec26f96e Dec 02 18:56:57 crc kubenswrapper[4792]: W1202 18:56:57.965541 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8c007d92_894f_4c86_bd8b_84dad70f41e5.slice/crio-23c1b752b90058cf211a44b297c2f66dced1a3cbb88326a3f2c4cae072431fd2 WatchSource:0}: Error finding container 23c1b752b90058cf211a44b297c2f66dced1a3cbb88326a3f2c4cae072431fd2: Status 404 returned error can't find the container with id 23c1b752b90058cf211a44b297c2f66dced1a3cbb88326a3f2c4cae072431fd2 Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.071833 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.071888 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.133301 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.362320 4792 generic.go:334] "Generic (PLEG): container finished" podID="0c5e3683-f4d1-4f32-8c6d-ecc11415c660" containerID="992b9796a4109273d05eda401a9abed33d155dba7086aa81eca45191687649a1" exitCode=0 Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.362441 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"0c5e3683-f4d1-4f32-8c6d-ecc11415c660","Type":"ContainerDied","Data":"992b9796a4109273d05eda401a9abed33d155dba7086aa81eca45191687649a1"} Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.367849 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-n56s7" event={"ID":"cb42b8ac-ce52-46dc-8742-339d3ea9ab03","Type":"ContainerStarted","Data":"b3b16047b3c5ed24afe14403544dd24de9bf5daec6fc0cc8834024fc25e432a5"} Dec 02 18:56:58 crc 
kubenswrapper[4792]: I1202 18:56:58.368464 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-n56s7" event={"ID":"cb42b8ac-ce52-46dc-8742-339d3ea9ab03","Type":"ContainerStarted","Data":"5d6adc9c3e025f40d3b6d39542b5cc6f72a6d06d6d006542f0bc47e36752c9f2"} Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.372698 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"26d86aa8-79aa-4d9b-ac24-155924920219","Type":"ContainerStarted","Data":"9aa01e3ae5a0cc14a165cdf36234b5731474e501765875eedc860c011c30ab89"} Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.374016 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-nvx6l" event={"ID":"0a44f4be-0f5d-45dc-9cb0-b4705d150c1a","Type":"ContainerStarted","Data":"414b82876d3510b176c8bb5465e61793b8aac1c9142382ea96540e15ec26f96e"} Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.378465 4792 generic.go:334] "Generic (PLEG): container finished" podID="79ae89c2-1470-4caa-ad26-718be5c3ab1f" containerID="148fa27b2a0453d543aa46fc955055c046223929fa660e370b23e1daffba0bd2" exitCode=0 Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.378599 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f6f696b9-dlvtt" event={"ID":"79ae89c2-1470-4caa-ad26-718be5c3ab1f","Type":"ContainerDied","Data":"148fa27b2a0453d543aa46fc955055c046223929fa660e370b23e1daffba0bd2"} Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.378638 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f6f696b9-dlvtt" event={"ID":"79ae89c2-1470-4caa-ad26-718be5c3ab1f","Type":"ContainerStarted","Data":"983acab9c4fc133e719b52785da139c4404601a0473c1df9bb445f5aa2ad872b"} Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.389202 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"3a9324ab-6fb7-4057-bf68-e74e0907aa80","Type":"ContainerStarted","Data":"b59d7484f19a224613f05a314b072578911a11c84b176d18185925e3062fdd18"} Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.390354 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.391456 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-whpqc" event={"ID":"8c007d92-894f-4c86-bd8b-84dad70f41e5","Type":"ContainerStarted","Data":"322dd22735727ef6424844f4f28e598486b4211b4730e23992b29624eaa92a22"} Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.391502 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-whpqc" event={"ID":"8c007d92-894f-4c86-bd8b-84dad70f41e5","Type":"ContainerStarted","Data":"23c1b752b90058cf211a44b297c2f66dced1a3cbb88326a3f2c4cae072431fd2"} Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.399299 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-jd92w" event={"ID":"865e7f48-168c-4b42-a6a0-308250071747","Type":"ContainerStarted","Data":"6ef80994a24f210ecc562f17d81f994f4b249edbdefd8e78ba9f594e8fbcb20c"} Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.472951 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=32.13738316 podStartE2EDuration="55.472923549s" podCreationTimestamp="2025-12-02 18:56:03 +0000 UTC" firstStartedPulling="2025-12-02 
18:56:34.68971659 +0000 UTC m=+1225.462608918" lastFinishedPulling="2025-12-02 18:56:58.025256979 +0000 UTC m=+1248.798149307" observedRunningTime="2025-12-02 18:56:58.457971872 +0000 UTC m=+1249.230864210" watchObservedRunningTime="2025-12-02 18:56:58.472923549 +0000 UTC m=+1249.245815877" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.478116 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.611333 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.630255 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.630376 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.636964 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.637155 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.637750 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-cwldp" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.645694 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.746134 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74f6f696b9-dlvtt" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.758250 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/69c2b30c-76db-4d6d-a091-3a86040f34fd-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"69c2b30c-76db-4d6d-a091-3a86040f34fd\") " pod="openstack/ovn-northd-0" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.758335 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/69c2b30c-76db-4d6d-a091-3a86040f34fd-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"69c2b30c-76db-4d6d-a091-3a86040f34fd\") " pod="openstack/ovn-northd-0" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.758364 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/69c2b30c-76db-4d6d-a091-3a86040f34fd-scripts\") pod \"ovn-northd-0\" (UID: \"69c2b30c-76db-4d6d-a091-3a86040f34fd\") " pod="openstack/ovn-northd-0" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.758385 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4xg7r\" (UniqueName: \"kubernetes.io/projected/69c2b30c-76db-4d6d-a091-3a86040f34fd-kube-api-access-4xg7r\") pod \"ovn-northd-0\" (UID: \"69c2b30c-76db-4d6d-a091-3a86040f34fd\") " pod="openstack/ovn-northd-0" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.758403 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/69c2b30c-76db-4d6d-a091-3a86040f34fd-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"69c2b30c-76db-4d6d-a091-3a86040f34fd\") " pod="openstack/ovn-northd-0" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.758461 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/69c2b30c-76db-4d6d-a091-3a86040f34fd-config\") pod \"ovn-northd-0\" (UID: \"69c2b30c-76db-4d6d-a091-3a86040f34fd\") " pod="openstack/ovn-northd-0" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.758475 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69c2b30c-76db-4d6d-a091-3a86040f34fd-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"69c2b30c-76db-4d6d-a091-3a86040f34fd\") " pod="openstack/ovn-northd-0" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.802747 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cb5889db5-whpqc" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.859380 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c007d92-894f-4c86-bd8b-84dad70f41e5-config\") pod \"8c007d92-894f-4c86-bd8b-84dad70f41e5\" (UID: \"8c007d92-894f-4c86-bd8b-84dad70f41e5\") " Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.859450 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/79ae89c2-1470-4caa-ad26-718be5c3ab1f-config\") pod \"79ae89c2-1470-4caa-ad26-718be5c3ab1f\" (UID: \"79ae89c2-1470-4caa-ad26-718be5c3ab1f\") " Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.859469 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8c007d92-894f-4c86-bd8b-84dad70f41e5-dns-svc\") pod \"8c007d92-894f-4c86-bd8b-84dad70f41e5\" (UID: \"8c007d92-894f-4c86-bd8b-84dad70f41e5\") " Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.859533 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/79ae89c2-1470-4caa-ad26-718be5c3ab1f-ovsdbserver-nb\") pod \"79ae89c2-1470-4caa-ad26-718be5c3ab1f\" (UID: \"79ae89c2-1470-4caa-ad26-718be5c3ab1f\") " Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.859578 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/79ae89c2-1470-4caa-ad26-718be5c3ab1f-dns-svc\") pod \"79ae89c2-1470-4caa-ad26-718be5c3ab1f\" (UID: \"79ae89c2-1470-4caa-ad26-718be5c3ab1f\") " Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.859611 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5hhs8\" (UniqueName: \"kubernetes.io/projected/79ae89c2-1470-4caa-ad26-718be5c3ab1f-kube-api-access-5hhs8\") pod \"79ae89c2-1470-4caa-ad26-718be5c3ab1f\" (UID: \"79ae89c2-1470-4caa-ad26-718be5c3ab1f\") " Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.859718 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rkvdr\" (UniqueName: \"kubernetes.io/projected/8c007d92-894f-4c86-bd8b-84dad70f41e5-kube-api-access-rkvdr\") pod \"8c007d92-894f-4c86-bd8b-84dad70f41e5\" (UID: 
\"8c007d92-894f-4c86-bd8b-84dad70f41e5\") " Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.860260 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/69c2b30c-76db-4d6d-a091-3a86040f34fd-config\") pod \"ovn-northd-0\" (UID: \"69c2b30c-76db-4d6d-a091-3a86040f34fd\") " pod="openstack/ovn-northd-0" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.860349 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69c2b30c-76db-4d6d-a091-3a86040f34fd-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"69c2b30c-76db-4d6d-a091-3a86040f34fd\") " pod="openstack/ovn-northd-0" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.860554 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/69c2b30c-76db-4d6d-a091-3a86040f34fd-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"69c2b30c-76db-4d6d-a091-3a86040f34fd\") " pod="openstack/ovn-northd-0" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.862277 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/69c2b30c-76db-4d6d-a091-3a86040f34fd-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"69c2b30c-76db-4d6d-a091-3a86040f34fd\") " pod="openstack/ovn-northd-0" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.862318 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/69c2b30c-76db-4d6d-a091-3a86040f34fd-scripts\") pod \"ovn-northd-0\" (UID: \"69c2b30c-76db-4d6d-a091-3a86040f34fd\") " pod="openstack/ovn-northd-0" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.862360 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4xg7r\" (UniqueName: \"kubernetes.io/projected/69c2b30c-76db-4d6d-a091-3a86040f34fd-kube-api-access-4xg7r\") pod \"ovn-northd-0\" (UID: \"69c2b30c-76db-4d6d-a091-3a86040f34fd\") " pod="openstack/ovn-northd-0" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.862387 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/69c2b30c-76db-4d6d-a091-3a86040f34fd-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"69c2b30c-76db-4d6d-a091-3a86040f34fd\") " pod="openstack/ovn-northd-0" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.865656 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/69c2b30c-76db-4d6d-a091-3a86040f34fd-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"69c2b30c-76db-4d6d-a091-3a86040f34fd\") " pod="openstack/ovn-northd-0" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.867603 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79ae89c2-1470-4caa-ad26-718be5c3ab1f-kube-api-access-5hhs8" (OuterVolumeSpecName: "kube-api-access-5hhs8") pod "79ae89c2-1470-4caa-ad26-718be5c3ab1f" (UID: "79ae89c2-1470-4caa-ad26-718be5c3ab1f"). InnerVolumeSpecName "kube-api-access-5hhs8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.867718 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c007d92-894f-4c86-bd8b-84dad70f41e5-kube-api-access-rkvdr" (OuterVolumeSpecName: "kube-api-access-rkvdr") pod "8c007d92-894f-4c86-bd8b-84dad70f41e5" (UID: "8c007d92-894f-4c86-bd8b-84dad70f41e5"). InnerVolumeSpecName "kube-api-access-rkvdr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.870951 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/69c2b30c-76db-4d6d-a091-3a86040f34fd-config\") pod \"ovn-northd-0\" (UID: \"69c2b30c-76db-4d6d-a091-3a86040f34fd\") " pod="openstack/ovn-northd-0" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.873213 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/69c2b30c-76db-4d6d-a091-3a86040f34fd-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"69c2b30c-76db-4d6d-a091-3a86040f34fd\") " pod="openstack/ovn-northd-0" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.870482 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/69c2b30c-76db-4d6d-a091-3a86040f34fd-scripts\") pod \"ovn-northd-0\" (UID: \"69c2b30c-76db-4d6d-a091-3a86040f34fd\") " pod="openstack/ovn-northd-0" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.879260 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/69c2b30c-76db-4d6d-a091-3a86040f34fd-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"69c2b30c-76db-4d6d-a091-3a86040f34fd\") " pod="openstack/ovn-northd-0" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.881258 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69c2b30c-76db-4d6d-a091-3a86040f34fd-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"69c2b30c-76db-4d6d-a091-3a86040f34fd\") " pod="openstack/ovn-northd-0" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.886082 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79ae89c2-1470-4caa-ad26-718be5c3ab1f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "79ae89c2-1470-4caa-ad26-718be5c3ab1f" (UID: "79ae89c2-1470-4caa-ad26-718be5c3ab1f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.887149 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4xg7r\" (UniqueName: \"kubernetes.io/projected/69c2b30c-76db-4d6d-a091-3a86040f34fd-kube-api-access-4xg7r\") pod \"ovn-northd-0\" (UID: \"69c2b30c-76db-4d6d-a091-3a86040f34fd\") " pod="openstack/ovn-northd-0" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.887203 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c007d92-894f-4c86-bd8b-84dad70f41e5-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8c007d92-894f-4c86-bd8b-84dad70f41e5" (UID: "8c007d92-894f-4c86-bd8b-84dad70f41e5"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.897864 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79ae89c2-1470-4caa-ad26-718be5c3ab1f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "79ae89c2-1470-4caa-ad26-718be5c3ab1f" (UID: "79ae89c2-1470-4caa-ad26-718be5c3ab1f"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.900800 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c007d92-894f-4c86-bd8b-84dad70f41e5-config" (OuterVolumeSpecName: "config") pod "8c007d92-894f-4c86-bd8b-84dad70f41e5" (UID: "8c007d92-894f-4c86-bd8b-84dad70f41e5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.921304 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79ae89c2-1470-4caa-ad26-718be5c3ab1f-config" (OuterVolumeSpecName: "config") pod "79ae89c2-1470-4caa-ad26-718be5c3ab1f" (UID: "79ae89c2-1470-4caa-ad26-718be5c3ab1f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.963822 4792 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/79ae89c2-1470-4caa-ad26-718be5c3ab1f-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.963855 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5hhs8\" (UniqueName: \"kubernetes.io/projected/79ae89c2-1470-4caa-ad26-718be5c3ab1f-kube-api-access-5hhs8\") on node \"crc\" DevicePath \"\"" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.963867 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rkvdr\" (UniqueName: \"kubernetes.io/projected/8c007d92-894f-4c86-bd8b-84dad70f41e5-kube-api-access-rkvdr\") on node \"crc\" DevicePath \"\"" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.963877 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c007d92-894f-4c86-bd8b-84dad70f41e5-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.963885 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/79ae89c2-1470-4caa-ad26-718be5c3ab1f-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.963893 4792 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8c007d92-894f-4c86-bd8b-84dad70f41e5-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.963902 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/79ae89c2-1470-4caa-ad26-718be5c3ab1f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 02 18:56:58 crc kubenswrapper[4792]: I1202 18:56:58.970825 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Dec 02 18:56:59 crc kubenswrapper[4792]: I1202 18:56:59.277068 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/252fab2e-fcb7-43e8-940a-48adc8f4ebd5-etc-swift\") pod \"swift-storage-0\" (UID: \"252fab2e-fcb7-43e8-940a-48adc8f4ebd5\") " pod="openstack/swift-storage-0" Dec 02 18:56:59 crc kubenswrapper[4792]: E1202 18:56:59.277266 4792 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 02 18:56:59 crc kubenswrapper[4792]: E1202 18:56:59.277604 4792 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 02 18:56:59 crc kubenswrapper[4792]: E1202 18:56:59.277656 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/252fab2e-fcb7-43e8-940a-48adc8f4ebd5-etc-swift podName:252fab2e-fcb7-43e8-940a-48adc8f4ebd5 nodeName:}" failed. No retries permitted until 2025-12-02 18:57:03.277640781 +0000 UTC m=+1254.050533109 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/252fab2e-fcb7-43e8-940a-48adc8f4ebd5-etc-swift") pod "swift-storage-0" (UID: "252fab2e-fcb7-43e8-940a-48adc8f4ebd5") : configmap "swift-ring-files" not found Dec 02 18:56:59 crc kubenswrapper[4792]: I1202 18:56:59.409848 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-nvx6l" event={"ID":"0a44f4be-0f5d-45dc-9cb0-b4705d150c1a","Type":"ContainerStarted","Data":"7c8265ebcd9b4285232d72c9e7ffcd848e9be40b31cc30d2236d0f3ed9e5a58c"} Dec 02 18:56:59 crc kubenswrapper[4792]: I1202 18:56:59.413772 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74f6f696b9-dlvtt" Dec 02 18:56:59 crc kubenswrapper[4792]: I1202 18:56:59.413962 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f6f696b9-dlvtt" event={"ID":"79ae89c2-1470-4caa-ad26-718be5c3ab1f","Type":"ContainerDied","Data":"983acab9c4fc133e719b52785da139c4404601a0473c1df9bb445f5aa2ad872b"} Dec 02 18:56:59 crc kubenswrapper[4792]: I1202 18:56:59.414050 4792 scope.go:117] "RemoveContainer" containerID="148fa27b2a0453d543aa46fc955055c046223929fa660e370b23e1daffba0bd2" Dec 02 18:56:59 crc kubenswrapper[4792]: I1202 18:56:59.420183 4792 generic.go:334] "Generic (PLEG): container finished" podID="8c007d92-894f-4c86-bd8b-84dad70f41e5" containerID="322dd22735727ef6424844f4f28e598486b4211b4730e23992b29624eaa92a22" exitCode=0 Dec 02 18:56:59 crc kubenswrapper[4792]: I1202 18:56:59.420254 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7cb5889db5-whpqc" Dec 02 18:56:59 crc kubenswrapper[4792]: I1202 18:56:59.420258 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-whpqc" event={"ID":"8c007d92-894f-4c86-bd8b-84dad70f41e5","Type":"ContainerDied","Data":"322dd22735727ef6424844f4f28e598486b4211b4730e23992b29624eaa92a22"} Dec 02 18:56:59 crc kubenswrapper[4792]: I1202 18:56:59.420360 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-whpqc" event={"ID":"8c007d92-894f-4c86-bd8b-84dad70f41e5","Type":"ContainerDied","Data":"23c1b752b90058cf211a44b297c2f66dced1a3cbb88326a3f2c4cae072431fd2"} Dec 02 18:56:59 crc kubenswrapper[4792]: I1202 18:56:59.425089 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"0c5e3683-f4d1-4f32-8c6d-ecc11415c660","Type":"ContainerStarted","Data":"1f0a8939ca91395eb5b5ba30ac32068e5ff3f82a7673a160a4c7c1c935494aaf"} Dec 02 18:56:59 crc kubenswrapper[4792]: I1202 18:56:59.426682 4792 generic.go:334] "Generic (PLEG): container finished" podID="cb42b8ac-ce52-46dc-8742-339d3ea9ab03" containerID="b3b16047b3c5ed24afe14403544dd24de9bf5daec6fc0cc8834024fc25e432a5" exitCode=0 Dec 02 18:56:59 crc kubenswrapper[4792]: I1202 18:56:59.426784 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-n56s7" event={"ID":"cb42b8ac-ce52-46dc-8742-339d3ea9ab03","Type":"ContainerDied","Data":"b3b16047b3c5ed24afe14403544dd24de9bf5daec6fc0cc8834024fc25e432a5"} Dec 02 18:56:59 crc kubenswrapper[4792]: I1202 18:56:59.446943 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-nvx6l" podStartSLOduration=5.446921967 podStartE2EDuration="5.446921967s" podCreationTimestamp="2025-12-02 18:56:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:56:59.423479 +0000 UTC m=+1250.196371338" watchObservedRunningTime="2025-12-02 18:56:59.446921967 +0000 UTC m=+1250.219814295" Dec 02 18:56:59 crc kubenswrapper[4792]: I1202 18:56:59.471134 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=-9223371976.383661 podStartE2EDuration="1m0.471114654s" podCreationTimestamp="2025-12-02 18:55:59 +0000 UTC" firstStartedPulling="2025-12-02 18:56:07.684653458 +0000 UTC m=+1198.457545796" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:56:59.471024462 +0000 UTC m=+1250.243916790" watchObservedRunningTime="2025-12-02 18:56:59.471114654 +0000 UTC m=+1250.244006992" Dec 02 18:56:59 crc kubenswrapper[4792]: I1202 18:56:59.477816 4792 scope.go:117] "RemoveContainer" containerID="322dd22735727ef6424844f4f28e598486b4211b4730e23992b29624eaa92a22" Dec 02 18:56:59 crc kubenswrapper[4792]: I1202 18:56:59.503275 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 02 18:56:59 crc kubenswrapper[4792]: W1202 18:56:59.511796 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod69c2b30c_76db_4d6d_a091_3a86040f34fd.slice/crio-cf73fce96ab158410e1626628296466bde738f95fd8044277fba2057572396af WatchSource:0}: Error finding container cf73fce96ab158410e1626628296466bde738f95fd8044277fba2057572396af: Status 404 returned error can't find the container with id 
cf73fce96ab158410e1626628296466bde738f95fd8044277fba2057572396af Dec 02 18:56:59 crc kubenswrapper[4792]: I1202 18:56:59.571299 4792 scope.go:117] "RemoveContainer" containerID="322dd22735727ef6424844f4f28e598486b4211b4730e23992b29624eaa92a22" Dec 02 18:56:59 crc kubenswrapper[4792]: E1202 18:56:59.572744 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"322dd22735727ef6424844f4f28e598486b4211b4730e23992b29624eaa92a22\": container with ID starting with 322dd22735727ef6424844f4f28e598486b4211b4730e23992b29624eaa92a22 not found: ID does not exist" containerID="322dd22735727ef6424844f4f28e598486b4211b4730e23992b29624eaa92a22" Dec 02 18:56:59 crc kubenswrapper[4792]: I1202 18:56:59.572807 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"322dd22735727ef6424844f4f28e598486b4211b4730e23992b29624eaa92a22"} err="failed to get container status \"322dd22735727ef6424844f4f28e598486b4211b4730e23992b29624eaa92a22\": rpc error: code = NotFound desc = could not find container \"322dd22735727ef6424844f4f28e598486b4211b4730e23992b29624eaa92a22\": container with ID starting with 322dd22735727ef6424844f4f28e598486b4211b4730e23992b29624eaa92a22 not found: ID does not exist" Dec 02 18:56:59 crc kubenswrapper[4792]: I1202 18:56:59.573903 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-whpqc"] Dec 02 18:56:59 crc kubenswrapper[4792]: I1202 18:56:59.573939 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-whpqc"] Dec 02 18:56:59 crc kubenswrapper[4792]: I1202 18:56:59.589957 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74f6f696b9-dlvtt"] Dec 02 18:56:59 crc kubenswrapper[4792]: I1202 18:56:59.617981 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-74f6f696b9-dlvtt"] Dec 02 18:57:00 crc kubenswrapper[4792]: I1202 18:57:00.442167 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-n56s7" event={"ID":"cb42b8ac-ce52-46dc-8742-339d3ea9ab03","Type":"ContainerStarted","Data":"560181dc27cc02423fda8f70bd5fe4924e2a1394e26186415302fa3a357de7fd"} Dec 02 18:57:00 crc kubenswrapper[4792]: I1202 18:57:00.442782 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-698758b865-n56s7" Dec 02 18:57:00 crc kubenswrapper[4792]: I1202 18:57:00.443776 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"69c2b30c-76db-4d6d-a091-3a86040f34fd","Type":"ContainerStarted","Data":"cf73fce96ab158410e1626628296466bde738f95fd8044277fba2057572396af"} Dec 02 18:57:01 crc kubenswrapper[4792]: I1202 18:57:01.242440 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Dec 02 18:57:01 crc kubenswrapper[4792]: I1202 18:57:01.244187 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Dec 02 18:57:01 crc kubenswrapper[4792]: E1202 18:57:01.546936 4792 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.188:42612->38.102.83.188:38837: write tcp 38.102.83.188:42612->38.102.83.188:38837: write: connection reset by peer Dec 02 18:57:01 crc kubenswrapper[4792]: I1202 18:57:01.557363 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="79ae89c2-1470-4caa-ad26-718be5c3ab1f" 
path="/var/lib/kubelet/pods/79ae89c2-1470-4caa-ad26-718be5c3ab1f/volumes" Dec 02 18:57:01 crc kubenswrapper[4792]: I1202 18:57:01.558419 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c007d92-894f-4c86-bd8b-84dad70f41e5" path="/var/lib/kubelet/pods/8c007d92-894f-4c86-bd8b-84dad70f41e5/volumes" Dec 02 18:57:03 crc kubenswrapper[4792]: I1202 18:57:03.373140 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/252fab2e-fcb7-43e8-940a-48adc8f4ebd5-etc-swift\") pod \"swift-storage-0\" (UID: \"252fab2e-fcb7-43e8-940a-48adc8f4ebd5\") " pod="openstack/swift-storage-0" Dec 02 18:57:03 crc kubenswrapper[4792]: E1202 18:57:03.373896 4792 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 02 18:57:03 crc kubenswrapper[4792]: E1202 18:57:03.373934 4792 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 02 18:57:03 crc kubenswrapper[4792]: E1202 18:57:03.374023 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/252fab2e-fcb7-43e8-940a-48adc8f4ebd5-etc-swift podName:252fab2e-fcb7-43e8-940a-48adc8f4ebd5 nodeName:}" failed. No retries permitted until 2025-12-02 18:57:11.373995687 +0000 UTC m=+1262.146888025 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/252fab2e-fcb7-43e8-940a-48adc8f4ebd5-etc-swift") pod "swift-storage-0" (UID: "252fab2e-fcb7-43e8-940a-48adc8f4ebd5") : configmap "swift-ring-files" not found Dec 02 18:57:03 crc kubenswrapper[4792]: I1202 18:57:03.478388 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"26d86aa8-79aa-4d9b-ac24-155924920219","Type":"ContainerStarted","Data":"14c1881980e1a23778efce4ec4039f0961bf5b84871b271ca54543093f842d34"} Dec 02 18:57:03 crc kubenswrapper[4792]: I1202 18:57:03.478878 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/alertmanager-metric-storage-0" Dec 02 18:57:03 crc kubenswrapper[4792]: I1202 18:57:03.483196 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/alertmanager-metric-storage-0" Dec 02 18:57:03 crc kubenswrapper[4792]: I1202 18:57:03.525761 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-698758b865-n56s7" podStartSLOduration=8.525738039 podStartE2EDuration="8.525738039s" podCreationTimestamp="2025-12-02 18:56:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:57:00.477007939 +0000 UTC m=+1251.249900267" watchObservedRunningTime="2025-12-02 18:57:03.525738039 +0000 UTC m=+1254.298630377" Dec 02 18:57:03 crc kubenswrapper[4792]: I1202 18:57:03.526434 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/alertmanager-metric-storage-0" podStartSLOduration=37.236248974 podStartE2EDuration="59.526425827s" podCreationTimestamp="2025-12-02 18:56:04 +0000 UTC" firstStartedPulling="2025-12-02 18:56:34.635195187 +0000 UTC m=+1225.408087515" lastFinishedPulling="2025-12-02 18:56:56.92537202 +0000 UTC m=+1247.698264368" observedRunningTime="2025-12-02 18:57:03.501201633 +0000 UTC m=+1254.274093961" watchObservedRunningTime="2025-12-02 18:57:03.526425827 +0000 UTC 
m=+1254.299318155" Dec 02 18:57:03 crc kubenswrapper[4792]: I1202 18:57:03.855557 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-querier-548665d79b-vwc9f" Dec 02 18:57:03 crc kubenswrapper[4792]: E1202 18:57:03.928876 4792 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.188:42624->38.102.83.188:38837: write tcp 38.102.83.188:42624->38.102.83.188:38837: write: broken pipe Dec 02 18:57:04 crc kubenswrapper[4792]: I1202 18:57:04.517348 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 02 18:57:04 crc kubenswrapper[4792]: I1202 18:57:04.889745 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cloudkitty-lokistack-ingester-0" podUID="63ad1bca-0ff4-4694-ab0a-56e8f5366d88" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 02 18:57:05 crc kubenswrapper[4792]: I1202 18:57:05.001033 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 02 18:57:05 crc kubenswrapper[4792]: I1202 18:57:05.462363 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-698758b865-n56s7" Dec 02 18:57:05 crc kubenswrapper[4792]: I1202 18:57:05.535595 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-9hhkl"] Dec 02 18:57:05 crc kubenswrapper[4792]: I1202 18:57:05.535840 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57d769cc4f-9hhkl" podUID="6f2d96d0-f671-46cd-8e95-162a0773470d" containerName="dnsmasq-dns" containerID="cri-o://194c00decb1cdaa61c023c7bd7f2a15f6d3e6afc03290996bed646e5d9f93467" gracePeriod=10 Dec 02 18:57:06 crc kubenswrapper[4792]: I1202 18:57:06.533364 4792 generic.go:334] "Generic (PLEG): container finished" podID="6f2d96d0-f671-46cd-8e95-162a0773470d" containerID="194c00decb1cdaa61c023c7bd7f2a15f6d3e6afc03290996bed646e5d9f93467" exitCode=0 Dec 02 18:57:06 crc kubenswrapper[4792]: I1202 18:57:06.533401 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-9hhkl" event={"ID":"6f2d96d0-f671-46cd-8e95-162a0773470d","Type":"ContainerDied","Data":"194c00decb1cdaa61c023c7bd7f2a15f6d3e6afc03290996bed646e5d9f93467"} Dec 02 18:57:06 crc kubenswrapper[4792]: I1202 18:57:06.615780 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Dec 02 18:57:06 crc kubenswrapper[4792]: I1202 18:57:06.729223 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/openstack-galera-0" podUID="0c5e3683-f4d1-4f32-8c6d-ecc11415c660" containerName="galera" probeResult="failure" output=< Dec 02 18:57:06 crc kubenswrapper[4792]: wsrep_local_state_comment (Joined) differs from Synced Dec 02 18:57:06 crc kubenswrapper[4792]: > Dec 02 18:57:08 crc kubenswrapper[4792]: I1202 18:57:08.554647 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-9hhkl" event={"ID":"6f2d96d0-f671-46cd-8e95-162a0773470d","Type":"ContainerDied","Data":"bd8870b20b018b9bd59ee44e9c18561ddc3b7f43b4c7000f2b5622a0f62a7c8b"} Dec 02 18:57:08 crc kubenswrapper[4792]: I1202 18:57:08.555194 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bd8870b20b018b9bd59ee44e9c18561ddc3b7f43b4c7000f2b5622a0f62a7c8b" Dec 02 18:57:08 crc 
kubenswrapper[4792]: I1202 18:57:08.557277 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-9hhkl" Dec 02 18:57:08 crc kubenswrapper[4792]: I1202 18:57:08.689293 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tnvwn\" (UniqueName: \"kubernetes.io/projected/6f2d96d0-f671-46cd-8e95-162a0773470d-kube-api-access-tnvwn\") pod \"6f2d96d0-f671-46cd-8e95-162a0773470d\" (UID: \"6f2d96d0-f671-46cd-8e95-162a0773470d\") " Dec 02 18:57:08 crc kubenswrapper[4792]: I1202 18:57:08.689385 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6f2d96d0-f671-46cd-8e95-162a0773470d-dns-svc\") pod \"6f2d96d0-f671-46cd-8e95-162a0773470d\" (UID: \"6f2d96d0-f671-46cd-8e95-162a0773470d\") " Dec 02 18:57:08 crc kubenswrapper[4792]: I1202 18:57:08.689710 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6f2d96d0-f671-46cd-8e95-162a0773470d-config\") pod \"6f2d96d0-f671-46cd-8e95-162a0773470d\" (UID: \"6f2d96d0-f671-46cd-8e95-162a0773470d\") " Dec 02 18:57:08 crc kubenswrapper[4792]: I1202 18:57:08.694784 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f2d96d0-f671-46cd-8e95-162a0773470d-kube-api-access-tnvwn" (OuterVolumeSpecName: "kube-api-access-tnvwn") pod "6f2d96d0-f671-46cd-8e95-162a0773470d" (UID: "6f2d96d0-f671-46cd-8e95-162a0773470d"). InnerVolumeSpecName "kube-api-access-tnvwn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:57:08 crc kubenswrapper[4792]: I1202 18:57:08.735090 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6f2d96d0-f671-46cd-8e95-162a0773470d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6f2d96d0-f671-46cd-8e95-162a0773470d" (UID: "6f2d96d0-f671-46cd-8e95-162a0773470d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:57:08 crc kubenswrapper[4792]: I1202 18:57:08.766879 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6f2d96d0-f671-46cd-8e95-162a0773470d-config" (OuterVolumeSpecName: "config") pod "6f2d96d0-f671-46cd-8e95-162a0773470d" (UID: "6f2d96d0-f671-46cd-8e95-162a0773470d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:57:08 crc kubenswrapper[4792]: I1202 18:57:08.792852 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6f2d96d0-f671-46cd-8e95-162a0773470d-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:08 crc kubenswrapper[4792]: I1202 18:57:08.792884 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tnvwn\" (UniqueName: \"kubernetes.io/projected/6f2d96d0-f671-46cd-8e95-162a0773470d-kube-api-access-tnvwn\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:08 crc kubenswrapper[4792]: I1202 18:57:08.792897 4792 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6f2d96d0-f671-46cd-8e95-162a0773470d-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:09 crc kubenswrapper[4792]: I1202 18:57:09.570441 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-jd92w" event={"ID":"865e7f48-168c-4b42-a6a0-308250071747","Type":"ContainerStarted","Data":"b77ba6a74a59112d6f0a5bcfb244843bac4e3b24a3931cf406958d34633b9f50"} Dec 02 18:57:09 crc kubenswrapper[4792]: I1202 18:57:09.572868 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"357feab9-6738-4c52-8478-0763a304671f","Type":"ContainerStarted","Data":"3f54a3b0040179d09f172163420f3935de7f9f98ccbfb59dedbb235ee6139d42"} Dec 02 18:57:09 crc kubenswrapper[4792]: I1202 18:57:09.577075 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-9hhkl" Dec 02 18:57:09 crc kubenswrapper[4792]: I1202 18:57:09.601211 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"69c2b30c-76db-4d6d-a091-3a86040f34fd","Type":"ContainerStarted","Data":"efb01578590948d31196bfb857a848bb2bf0047cf617835eacd64c012606c7c2"} Dec 02 18:57:09 crc kubenswrapper[4792]: I1202 18:57:09.601282 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"69c2b30c-76db-4d6d-a091-3a86040f34fd","Type":"ContainerStarted","Data":"448c3a416b115a668adabbdcf8b937df968adb2fba3deeedc31b2060cf2c8f4c"} Dec 02 18:57:09 crc kubenswrapper[4792]: I1202 18:57:09.602167 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Dec 02 18:57:09 crc kubenswrapper[4792]: I1202 18:57:09.627706 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-jd92w" podStartSLOduration=3.259669045 podStartE2EDuration="13.627678281s" podCreationTimestamp="2025-12-02 18:56:56 +0000 UTC" firstStartedPulling="2025-12-02 18:56:58.008420563 +0000 UTC m=+1248.781312881" lastFinishedPulling="2025-12-02 18:57:08.376429789 +0000 UTC m=+1259.149322117" observedRunningTime="2025-12-02 18:57:09.602425187 +0000 UTC m=+1260.375317525" watchObservedRunningTime="2025-12-02 18:57:09.627678281 +0000 UTC m=+1260.400570649" Dec 02 18:57:09 crc kubenswrapper[4792]: I1202 18:57:09.639921 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.734301357 podStartE2EDuration="11.639893938s" podCreationTimestamp="2025-12-02 18:56:58 +0000 UTC" firstStartedPulling="2025-12-02 18:56:59.516044639 +0000 UTC m=+1250.288936967" lastFinishedPulling="2025-12-02 18:57:08.42163722 +0000 UTC m=+1259.194529548" observedRunningTime="2025-12-02 18:57:09.635376081 +0000 UTC 
m=+1260.408268419" watchObservedRunningTime="2025-12-02 18:57:09.639893938 +0000 UTC m=+1260.412786276" Dec 02 18:57:09 crc kubenswrapper[4792]: I1202 18:57:09.668799 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-9hhkl"] Dec 02 18:57:09 crc kubenswrapper[4792]: I1202 18:57:09.678386 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-9hhkl"] Dec 02 18:57:11 crc kubenswrapper[4792]: I1202 18:57:11.328886 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Dec 02 18:57:11 crc kubenswrapper[4792]: I1202 18:57:11.474142 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/252fab2e-fcb7-43e8-940a-48adc8f4ebd5-etc-swift\") pod \"swift-storage-0\" (UID: \"252fab2e-fcb7-43e8-940a-48adc8f4ebd5\") " pod="openstack/swift-storage-0" Dec 02 18:57:11 crc kubenswrapper[4792]: E1202 18:57:11.474336 4792 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 02 18:57:11 crc kubenswrapper[4792]: E1202 18:57:11.474356 4792 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 02 18:57:11 crc kubenswrapper[4792]: E1202 18:57:11.474403 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/252fab2e-fcb7-43e8-940a-48adc8f4ebd5-etc-swift podName:252fab2e-fcb7-43e8-940a-48adc8f4ebd5 nodeName:}" failed. No retries permitted until 2025-12-02 18:57:27.474385993 +0000 UTC m=+1278.247278321 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/252fab2e-fcb7-43e8-940a-48adc8f4ebd5-etc-swift") pod "swift-storage-0" (UID: "252fab2e-fcb7-43e8-940a-48adc8f4ebd5") : configmap "swift-ring-files" not found Dec 02 18:57:11 crc kubenswrapper[4792]: I1202 18:57:11.563169 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f2d96d0-f671-46cd-8e95-162a0773470d" path="/var/lib/kubelet/pods/6f2d96d0-f671-46cd-8e95-162a0773470d/volumes" Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.295579 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-vh4wp"] Dec 02 18:57:12 crc kubenswrapper[4792]: E1202 18:57:12.295994 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f2d96d0-f671-46cd-8e95-162a0773470d" containerName="init" Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.296013 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f2d96d0-f671-46cd-8e95-162a0773470d" containerName="init" Dec 02 18:57:12 crc kubenswrapper[4792]: E1202 18:57:12.296024 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c007d92-894f-4c86-bd8b-84dad70f41e5" containerName="init" Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.296030 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c007d92-894f-4c86-bd8b-84dad70f41e5" containerName="init" Dec 02 18:57:12 crc kubenswrapper[4792]: E1202 18:57:12.296070 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79ae89c2-1470-4caa-ad26-718be5c3ab1f" containerName="init" Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.296078 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="79ae89c2-1470-4caa-ad26-718be5c3ab1f" containerName="init" Dec 02 18:57:12 crc kubenswrapper[4792]: E1202 
Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.295579 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-vh4wp"]
Dec 02 18:57:12 crc kubenswrapper[4792]: E1202 18:57:12.295994 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f2d96d0-f671-46cd-8e95-162a0773470d" containerName="init"
Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.296013 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f2d96d0-f671-46cd-8e95-162a0773470d" containerName="init"
Dec 02 18:57:12 crc kubenswrapper[4792]: E1202 18:57:12.296024 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c007d92-894f-4c86-bd8b-84dad70f41e5" containerName="init"
Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.296030 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c007d92-894f-4c86-bd8b-84dad70f41e5" containerName="init"
Dec 02 18:57:12 crc kubenswrapper[4792]: E1202 18:57:12.296070 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79ae89c2-1470-4caa-ad26-718be5c3ab1f" containerName="init"
Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.296078 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="79ae89c2-1470-4caa-ad26-718be5c3ab1f" containerName="init"
Dec 02 18:57:12 crc kubenswrapper[4792]: E1202 18:57:12.296094 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f2d96d0-f671-46cd-8e95-162a0773470d" containerName="dnsmasq-dns"
Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.296101 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f2d96d0-f671-46cd-8e95-162a0773470d" containerName="dnsmasq-dns"
Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.296309 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c007d92-894f-4c86-bd8b-84dad70f41e5" containerName="init"
Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.296331 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="79ae89c2-1470-4caa-ad26-718be5c3ab1f" containerName="init"
Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.296344 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f2d96d0-f671-46cd-8e95-162a0773470d" containerName="dnsmasq-dns"
Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.297178 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-vh4wp"
Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.305126 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-93d3-account-create-update-ptvkf"]
Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.308432 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-93d3-account-create-update-ptvkf"
Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.312739 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-vh4wp"]
Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.313301 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret"
Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.325587 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-93d3-account-create-update-ptvkf"]
Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.391811 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vtwcj\" (UniqueName: \"kubernetes.io/projected/c2d3693c-b058-4505-a62c-7cf9e374d3d7-kube-api-access-vtwcj\") pod \"keystone-db-create-vh4wp\" (UID: \"c2d3693c-b058-4505-a62c-7cf9e374d3d7\") " pod="openstack/keystone-db-create-vh4wp"
Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.391965 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c2d3693c-b058-4505-a62c-7cf9e374d3d7-operator-scripts\") pod \"keystone-db-create-vh4wp\" (UID: \"c2d3693c-b058-4505-a62c-7cf9e374d3d7\") " pod="openstack/keystone-db-create-vh4wp"
Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.493573 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vtwcj\" (UniqueName: \"kubernetes.io/projected/c2d3693c-b058-4505-a62c-7cf9e374d3d7-kube-api-access-vtwcj\") pod \"keystone-db-create-vh4wp\" (UID: \"c2d3693c-b058-4505-a62c-7cf9e374d3d7\") " pod="openstack/keystone-db-create-vh4wp"
Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.493983 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c2d3693c-b058-4505-a62c-7cf9e374d3d7-operator-scripts\") pod \"keystone-db-create-vh4wp\" (UID: \"c2d3693c-b058-4505-a62c-7cf9e374d3d7\") " pod="openstack/keystone-db-create-vh4wp"
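The cpu_manager, state_mem and memory_manager entries above show kubelet dropping per-container resource assignments for pods that no longer exist (among them the dnsmasq-dns pod deleted earlier), a sweep triggered while admitting the new keystone-db-create-vh4wp pod. The Go sketch below illustrates that bookkeeping pattern with invented types and cpuset values; it is not the kubelet cpumanager API.

package main

import "fmt"

// Assignments are keyed by (podUID, containerName); admitting a new pod
// triggers a sweep that drops entries whose pod is no longer active.
type containerKey struct {
    podUID        string
    containerName string
}

func removeStaleState(assignments map[containerKey]string, activePods map[string]bool) {
    for k := range assignments { // deleting while ranging is safe in Go
        if !activePods[k.podUID] {
            fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n",
                k.podUID, k.containerName)
            delete(assignments, k)
        }
    }
}

func main() {
    // The dnsmasq-dns pod was deleted above, so its init and dnsmasq-dns
    // containers are stale by the time keystone-db-create-vh4wp is admitted.
    // The cpuset strings are purely illustrative.
    assignments := map[containerKey]string{
        {"6f2d96d0-f671-46cd-8e95-162a0773470d", "init"}:        "cpuset 0-1",
        {"6f2d96d0-f671-46cd-8e95-162a0773470d", "dnsmasq-dns"}: "cpuset 0-1",
    }
    removeStaleState(assignments, map[string]bool{}) // no active pods claim these UIDs
    fmt.Println("assignments left:", len(assignments))
}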
pod="openstack/keystone-db-create-vh4wp" Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.494016 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75ckv\" (UniqueName: \"kubernetes.io/projected/6d35c039-6dbb-4fb5-b02e-9d8453c72dd8-kube-api-access-75ckv\") pod \"keystone-93d3-account-create-update-ptvkf\" (UID: \"6d35c039-6dbb-4fb5-b02e-9d8453c72dd8\") " pod="openstack/keystone-93d3-account-create-update-ptvkf" Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.494070 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6d35c039-6dbb-4fb5-b02e-9d8453c72dd8-operator-scripts\") pod \"keystone-93d3-account-create-update-ptvkf\" (UID: \"6d35c039-6dbb-4fb5-b02e-9d8453c72dd8\") " pod="openstack/keystone-93d3-account-create-update-ptvkf" Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.495368 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-64h7l"] Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.494911 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c2d3693c-b058-4505-a62c-7cf9e374d3d7-operator-scripts\") pod \"keystone-db-create-vh4wp\" (UID: \"c2d3693c-b058-4505-a62c-7cf9e374d3d7\") " pod="openstack/keystone-db-create-vh4wp" Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.496801 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-64h7l" Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.517474 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vtwcj\" (UniqueName: \"kubernetes.io/projected/c2d3693c-b058-4505-a62c-7cf9e374d3d7-kube-api-access-vtwcj\") pod \"keystone-db-create-vh4wp\" (UID: \"c2d3693c-b058-4505-a62c-7cf9e374d3d7\") " pod="openstack/keystone-db-create-vh4wp" Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.518807 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-64h7l"] Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.591768 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-3cdb-account-create-update-p2q9g"] Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.593398 4792 util.go:30] "No sandbox for pod can be found. 
Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.595255 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f9cfe4ef-8938-431b-8489-9548343fec57-operator-scripts\") pod \"placement-db-create-64h7l\" (UID: \"f9cfe4ef-8938-431b-8489-9548343fec57\") " pod="openstack/placement-db-create-64h7l"
Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.595363 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75ckv\" (UniqueName: \"kubernetes.io/projected/6d35c039-6dbb-4fb5-b02e-9d8453c72dd8-kube-api-access-75ckv\") pod \"keystone-93d3-account-create-update-ptvkf\" (UID: \"6d35c039-6dbb-4fb5-b02e-9d8453c72dd8\") " pod="openstack/keystone-93d3-account-create-update-ptvkf"
Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.595437 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6d35c039-6dbb-4fb5-b02e-9d8453c72dd8-operator-scripts\") pod \"keystone-93d3-account-create-update-ptvkf\" (UID: \"6d35c039-6dbb-4fb5-b02e-9d8453c72dd8\") " pod="openstack/keystone-93d3-account-create-update-ptvkf"
Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.595467 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-99b8h\" (UniqueName: \"kubernetes.io/projected/f9cfe4ef-8938-431b-8489-9548343fec57-kube-api-access-99b8h\") pod \"placement-db-create-64h7l\" (UID: \"f9cfe4ef-8938-431b-8489-9548343fec57\") " pod="openstack/placement-db-create-64h7l"
Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.595768 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret"
Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.596281 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6d35c039-6dbb-4fb5-b02e-9d8453c72dd8-operator-scripts\") pod \"keystone-93d3-account-create-update-ptvkf\" (UID: \"6d35c039-6dbb-4fb5-b02e-9d8453c72dd8\") " pod="openstack/keystone-93d3-account-create-update-ptvkf"
Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.603485 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-3cdb-account-create-update-p2q9g"]
Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.620580 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75ckv\" (UniqueName: \"kubernetes.io/projected/6d35c039-6dbb-4fb5-b02e-9d8453c72dd8-kube-api-access-75ckv\") pod \"keystone-93d3-account-create-update-ptvkf\" (UID: \"6d35c039-6dbb-4fb5-b02e-9d8453c72dd8\") " pod="openstack/keystone-93d3-account-create-update-ptvkf"
Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.620638 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-vh4wp"
Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.631078 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-93d3-account-create-update-ptvkf"
Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.697009 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fc6j8\" (UniqueName: \"kubernetes.io/projected/c989d282-ef5d-4644-8610-8207aae7d341-kube-api-access-fc6j8\") pod \"placement-3cdb-account-create-update-p2q9g\" (UID: \"c989d282-ef5d-4644-8610-8207aae7d341\") " pod="openstack/placement-3cdb-account-create-update-p2q9g"
Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.697086 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-99b8h\" (UniqueName: \"kubernetes.io/projected/f9cfe4ef-8938-431b-8489-9548343fec57-kube-api-access-99b8h\") pod \"placement-db-create-64h7l\" (UID: \"f9cfe4ef-8938-431b-8489-9548343fec57\") " pod="openstack/placement-db-create-64h7l"
Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.697178 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f9cfe4ef-8938-431b-8489-9548343fec57-operator-scripts\") pod \"placement-db-create-64h7l\" (UID: \"f9cfe4ef-8938-431b-8489-9548343fec57\") " pod="openstack/placement-db-create-64h7l"
Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.697231 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c989d282-ef5d-4644-8610-8207aae7d341-operator-scripts\") pod \"placement-3cdb-account-create-update-p2q9g\" (UID: \"c989d282-ef5d-4644-8610-8207aae7d341\") " pod="openstack/placement-3cdb-account-create-update-p2q9g"
Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.699641 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f9cfe4ef-8938-431b-8489-9548343fec57-operator-scripts\") pod \"placement-db-create-64h7l\" (UID: \"f9cfe4ef-8938-431b-8489-9548343fec57\") " pod="openstack/placement-db-create-64h7l"
Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.720988 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-99b8h\" (UniqueName: \"kubernetes.io/projected/f9cfe4ef-8938-431b-8489-9548343fec57-kube-api-access-99b8h\") pod \"placement-db-create-64h7l\" (UID: \"f9cfe4ef-8938-431b-8489-9548343fec57\") " pod="openstack/placement-db-create-64h7l"
Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.798221 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fc6j8\" (UniqueName: \"kubernetes.io/projected/c989d282-ef5d-4644-8610-8207aae7d341-kube-api-access-fc6j8\") pod \"placement-3cdb-account-create-update-p2q9g\" (UID: \"c989d282-ef5d-4644-8610-8207aae7d341\") " pod="openstack/placement-3cdb-account-create-update-p2q9g"
Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.798376 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c989d282-ef5d-4644-8610-8207aae7d341-operator-scripts\") pod \"placement-3cdb-account-create-update-p2q9g\" (UID: \"c989d282-ef5d-4644-8610-8207aae7d341\") " pod="openstack/placement-3cdb-account-create-update-p2q9g"
Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.799081 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c989d282-ef5d-4644-8610-8207aae7d341-operator-scripts\") pod \"placement-3cdb-account-create-update-p2q9g\" (UID: \"c989d282-ef5d-4644-8610-8207aae7d341\") " pod="openstack/placement-3cdb-account-create-update-p2q9g"
\"kubernetes.io/configmap/c989d282-ef5d-4644-8610-8207aae7d341-operator-scripts\") pod \"placement-3cdb-account-create-update-p2q9g\" (UID: \"c989d282-ef5d-4644-8610-8207aae7d341\") " pod="openstack/placement-3cdb-account-create-update-p2q9g" Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.816748 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fc6j8\" (UniqueName: \"kubernetes.io/projected/c989d282-ef5d-4644-8610-8207aae7d341-kube-api-access-fc6j8\") pod \"placement-3cdb-account-create-update-p2q9g\" (UID: \"c989d282-ef5d-4644-8610-8207aae7d341\") " pod="openstack/placement-3cdb-account-create-update-p2q9g" Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.863301 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-64h7l" Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.908488 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-3cdb-account-create-update-p2q9g" Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.935656 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-gpxp2"] Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.937138 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-gpxp2" Dec 02 18:57:12 crc kubenswrapper[4792]: I1202 18:57:12.954196 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-gpxp2"] Dec 02 18:57:13 crc kubenswrapper[4792]: I1202 18:57:13.054612 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-9346-account-create-update-4t87k"] Dec 02 18:57:13 crc kubenswrapper[4792]: I1202 18:57:13.059533 4792 util.go:30] "No sandbox for pod can be found. 
Dec 02 18:57:13 crc kubenswrapper[4792]: I1202 18:57:13.064584 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret"
Dec 02 18:57:13 crc kubenswrapper[4792]: I1202 18:57:13.074304 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-9346-account-create-update-4t87k"]
Dec 02 18:57:13 crc kubenswrapper[4792]: I1202 18:57:13.105430 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ad0e58c2-6db5-439e-897c-89de96dfe2f2-operator-scripts\") pod \"glance-db-create-gpxp2\" (UID: \"ad0e58c2-6db5-439e-897c-89de96dfe2f2\") " pod="openstack/glance-db-create-gpxp2"
Dec 02 18:57:13 crc kubenswrapper[4792]: I1202 18:57:13.105532 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lvclf\" (UniqueName: \"kubernetes.io/projected/ad0e58c2-6db5-439e-897c-89de96dfe2f2-kube-api-access-lvclf\") pod \"glance-db-create-gpxp2\" (UID: \"ad0e58c2-6db5-439e-897c-89de96dfe2f2\") " pod="openstack/glance-db-create-gpxp2"
Dec 02 18:57:13 crc kubenswrapper[4792]: I1202 18:57:13.125000 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-93d3-account-create-update-ptvkf"]
Dec 02 18:57:13 crc kubenswrapper[4792]: I1202 18:57:13.202638 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-57d769cc4f-9hhkl" podUID="6f2d96d0-f671-46cd-8e95-162a0773470d" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.106:5353: i/o timeout"
Dec 02 18:57:13 crc kubenswrapper[4792]: I1202 18:57:13.207887 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lvclf\" (UniqueName: \"kubernetes.io/projected/ad0e58c2-6db5-439e-897c-89de96dfe2f2-kube-api-access-lvclf\") pod \"glance-db-create-gpxp2\" (UID: \"ad0e58c2-6db5-439e-897c-89de96dfe2f2\") " pod="openstack/glance-db-create-gpxp2"
Dec 02 18:57:13 crc kubenswrapper[4792]: I1202 18:57:13.208007 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4l6lg\" (UniqueName: \"kubernetes.io/projected/e62e5f1d-017c-4f71-9cd5-b02bd55a7138-kube-api-access-4l6lg\") pod \"glance-9346-account-create-update-4t87k\" (UID: \"e62e5f1d-017c-4f71-9cd5-b02bd55a7138\") " pod="openstack/glance-9346-account-create-update-4t87k"
Dec 02 18:57:13 crc kubenswrapper[4792]: I1202 18:57:13.208042 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e62e5f1d-017c-4f71-9cd5-b02bd55a7138-operator-scripts\") pod \"glance-9346-account-create-update-4t87k\" (UID: \"e62e5f1d-017c-4f71-9cd5-b02bd55a7138\") " pod="openstack/glance-9346-account-create-update-4t87k"
Dec 02 18:57:13 crc kubenswrapper[4792]: I1202 18:57:13.208114 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ad0e58c2-6db5-439e-897c-89de96dfe2f2-operator-scripts\") pod \"glance-db-create-gpxp2\" (UID: \"ad0e58c2-6db5-439e-897c-89de96dfe2f2\") " pod="openstack/glance-db-create-gpxp2"
Dec 02 18:57:13 crc kubenswrapper[4792]: I1202 18:57:13.209755 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ad0e58c2-6db5-439e-897c-89de96dfe2f2-operator-scripts\") pod \"glance-db-create-gpxp2\" (UID: \"ad0e58c2-6db5-439e-897c-89de96dfe2f2\") " pod="openstack/glance-db-create-gpxp2"
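The dnsmasq-dns probe failure a few entries up ("dial tcp 10.217.0.106:5353: i/o timeout") is a plain TCP readiness probe timing out against the pod IP. Functionally it reduces to the standalone dial below; the 1-second timeout is an assumption, since the probe's timeoutSeconds is not visible in the log.

package main

import (
    "fmt"
    "net"
    "time"
)

func main() {
    // Kubelet dials the pod IP and port; a timeout or refused connection
    // counts as a readiness probe failure. Address copied from the log.
    conn, err := net.DialTimeout("tcp", "10.217.0.106:5353", 1*time.Second)
    if err != nil {
        fmt.Println("probe failed:", err) // e.g. "dial tcp 10.217.0.106:5353: i/o timeout"
        return
    }
    conn.Close()
    fmt.Println("probe succeeded")
}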
\"kubernetes.io/configmap/ad0e58c2-6db5-439e-897c-89de96dfe2f2-operator-scripts\") pod \"glance-db-create-gpxp2\" (UID: \"ad0e58c2-6db5-439e-897c-89de96dfe2f2\") " pod="openstack/glance-db-create-gpxp2" Dec 02 18:57:13 crc kubenswrapper[4792]: I1202 18:57:13.215918 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-vh4wp"] Dec 02 18:57:13 crc kubenswrapper[4792]: I1202 18:57:13.226211 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lvclf\" (UniqueName: \"kubernetes.io/projected/ad0e58c2-6db5-439e-897c-89de96dfe2f2-kube-api-access-lvclf\") pod \"glance-db-create-gpxp2\" (UID: \"ad0e58c2-6db5-439e-897c-89de96dfe2f2\") " pod="openstack/glance-db-create-gpxp2" Dec 02 18:57:13 crc kubenswrapper[4792]: I1202 18:57:13.270173 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-gpxp2" Dec 02 18:57:13 crc kubenswrapper[4792]: I1202 18:57:13.309454 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4l6lg\" (UniqueName: \"kubernetes.io/projected/e62e5f1d-017c-4f71-9cd5-b02bd55a7138-kube-api-access-4l6lg\") pod \"glance-9346-account-create-update-4t87k\" (UID: \"e62e5f1d-017c-4f71-9cd5-b02bd55a7138\") " pod="openstack/glance-9346-account-create-update-4t87k" Dec 02 18:57:13 crc kubenswrapper[4792]: I1202 18:57:13.309628 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e62e5f1d-017c-4f71-9cd5-b02bd55a7138-operator-scripts\") pod \"glance-9346-account-create-update-4t87k\" (UID: \"e62e5f1d-017c-4f71-9cd5-b02bd55a7138\") " pod="openstack/glance-9346-account-create-update-4t87k" Dec 02 18:57:13 crc kubenswrapper[4792]: I1202 18:57:13.310730 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e62e5f1d-017c-4f71-9cd5-b02bd55a7138-operator-scripts\") pod \"glance-9346-account-create-update-4t87k\" (UID: \"e62e5f1d-017c-4f71-9cd5-b02bd55a7138\") " pod="openstack/glance-9346-account-create-update-4t87k" Dec 02 18:57:13 crc kubenswrapper[4792]: I1202 18:57:13.330363 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4l6lg\" (UniqueName: \"kubernetes.io/projected/e62e5f1d-017c-4f71-9cd5-b02bd55a7138-kube-api-access-4l6lg\") pod \"glance-9346-account-create-update-4t87k\" (UID: \"e62e5f1d-017c-4f71-9cd5-b02bd55a7138\") " pod="openstack/glance-9346-account-create-update-4t87k" Dec 02 18:57:13 crc kubenswrapper[4792]: I1202 18:57:13.385344 4792 util.go:30] "No sandbox for pod can be found. 
Dec 02 18:57:13 crc kubenswrapper[4792]: I1202 18:57:13.477337 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-64h7l"]
Dec 02 18:57:13 crc kubenswrapper[4792]: W1202 18:57:13.536354 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf9cfe4ef_8938_431b_8489_9548343fec57.slice/crio-fceaaffddf4d660a0d1c39cfb8e22fd48fea0f2a467b33ac159698a238a9393c WatchSource:0}: Error finding container fceaaffddf4d660a0d1c39cfb8e22fd48fea0f2a467b33ac159698a238a9393c: Status 404 returned error can't find the container with id fceaaffddf4d660a0d1c39cfb8e22fd48fea0f2a467b33ac159698a238a9393c
Dec 02 18:57:13 crc kubenswrapper[4792]: I1202 18:57:13.601750 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-3cdb-account-create-update-p2q9g"]
Dec 02 18:57:13 crc kubenswrapper[4792]: I1202 18:57:13.673083 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"357feab9-6738-4c52-8478-0763a304671f","Type":"ContainerStarted","Data":"468bb6ff5c8c9b2fc5ac5d0d48cc56d770bd6b3c4aeb66e472caf0ca853b2128"}
Dec 02 18:57:13 crc kubenswrapper[4792]: I1202 18:57:13.680511 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-93d3-account-create-update-ptvkf" event={"ID":"6d35c039-6dbb-4fb5-b02e-9d8453c72dd8","Type":"ContainerStarted","Data":"c32063f114b0587b4fa057851c5bba268a42615c302c85a5776f23ac21374016"}
Dec 02 18:57:13 crc kubenswrapper[4792]: I1202 18:57:13.680596 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-93d3-account-create-update-ptvkf" event={"ID":"6d35c039-6dbb-4fb5-b02e-9d8453c72dd8","Type":"ContainerStarted","Data":"74c87cdc5dca0ba22ee9a9381ff5e084a779c3f284959a384ef1b46783bd3d0a"}
Dec 02 18:57:13 crc kubenswrapper[4792]: I1202 18:57:13.682970 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-3cdb-account-create-update-p2q9g" event={"ID":"c989d282-ef5d-4644-8610-8207aae7d341","Type":"ContainerStarted","Data":"fe0c21558676d814ed7d7e8ec41945a8cbeacf75c95790d0beed4f2996661715"}
Dec 02 18:57:13 crc kubenswrapper[4792]: I1202 18:57:13.692926 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-64h7l" event={"ID":"f9cfe4ef-8938-431b-8489-9548343fec57","Type":"ContainerStarted","Data":"fceaaffddf4d660a0d1c39cfb8e22fd48fea0f2a467b33ac159698a238a9393c"}
Dec 02 18:57:13 crc kubenswrapper[4792]: I1202 18:57:13.694101 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-vh4wp" event={"ID":"c2d3693c-b058-4505-a62c-7cf9e374d3d7","Type":"ContainerStarted","Data":"ed0daa3cb029ad38ac04f7c041663a931eccfe94b40feccc219e4774246baab3"}
Dec 02 18:57:13 crc kubenswrapper[4792]: I1202 18:57:13.694130 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-vh4wp" event={"ID":"c2d3693c-b058-4505-a62c-7cf9e374d3d7","Type":"ContainerStarted","Data":"12352145618b5bb1ae89bd7080c766b07769e510e0bfb41d16f0178d185fb4bd"}
Dec 02 18:57:13 crc kubenswrapper[4792]: I1202 18:57:13.696726 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-93d3-account-create-update-ptvkf" podStartSLOduration=1.696712067 podStartE2EDuration="1.696712067s" podCreationTimestamp="2025-12-02 18:57:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:57:13.690866296 +0000 UTC m=+1264.463758624" watchObservedRunningTime="2025-12-02 18:57:13.696712067 +0000 UTC m=+1264.469604395"
UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:57:13.690866296 +0000 UTC m=+1264.463758624" watchObservedRunningTime="2025-12-02 18:57:13.696712067 +0000 UTC m=+1264.469604395" Dec 02 18:57:13 crc kubenswrapper[4792]: I1202 18:57:13.728182 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-lgcfh" Dec 02 18:57:13 crc kubenswrapper[4792]: I1202 18:57:13.753018 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-create-vh4wp" podStartSLOduration=1.752993016 podStartE2EDuration="1.752993016s" podCreationTimestamp="2025-12-02 18:57:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:57:13.712484286 +0000 UTC m=+1264.485376614" watchObservedRunningTime="2025-12-02 18:57:13.752993016 +0000 UTC m=+1264.525885344" Dec 02 18:57:13 crc kubenswrapper[4792]: I1202 18:57:13.830477 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-gpxp2"] Dec 02 18:57:13 crc kubenswrapper[4792]: I1202 18:57:13.956338 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-n28fq" Dec 02 18:57:14 crc kubenswrapper[4792]: I1202 18:57:14.030212 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-9346-account-create-update-4t87k"] Dec 02 18:57:14 crc kubenswrapper[4792]: W1202 18:57:14.037134 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode62e5f1d_017c_4f71_9cd5_b02bd55a7138.slice/crio-1cc3bfb23c603829f333395dd3ffd9f0103e56ec0d9bad57f586f24910837d3f WatchSource:0}: Error finding container 1cc3bfb23c603829f333395dd3ffd9f0103e56ec0d9bad57f586f24910837d3f: Status 404 returned error can't find the container with id 1cc3bfb23c603829f333395dd3ffd9f0103e56ec0d9bad57f586f24910837d3f Dec 02 18:57:14 crc kubenswrapper[4792]: I1202 18:57:14.189729 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-s44lp" podUID="2a5ad51d-6996-42c0-b156-600ff9dc7782" containerName="ovn-controller" probeResult="failure" output=< Dec 02 18:57:14 crc kubenswrapper[4792]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 02 18:57:14 crc kubenswrapper[4792]: > Dec 02 18:57:14 crc kubenswrapper[4792]: I1202 18:57:14.708142 4792 generic.go:334] "Generic (PLEG): container finished" podID="ad0e58c2-6db5-439e-897c-89de96dfe2f2" containerID="ce372248cbea7bbf9d4d6e3b4a0539b4c6f6eb7bd6a57274f8e66f2f3ffb4499" exitCode=0 Dec 02 18:57:14 crc kubenswrapper[4792]: I1202 18:57:14.708237 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-gpxp2" event={"ID":"ad0e58c2-6db5-439e-897c-89de96dfe2f2","Type":"ContainerDied","Data":"ce372248cbea7bbf9d4d6e3b4a0539b4c6f6eb7bd6a57274f8e66f2f3ffb4499"} Dec 02 18:57:14 crc kubenswrapper[4792]: I1202 18:57:14.708454 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-gpxp2" event={"ID":"ad0e58c2-6db5-439e-897c-89de96dfe2f2","Type":"ContainerStarted","Data":"bd5164dd741bfbbbec19cb8f8b45a6923a4e10b3c80ad08debda35962268ba7f"} Dec 02 18:57:14 crc kubenswrapper[4792]: I1202 18:57:14.715569 4792 generic.go:334] "Generic (PLEG): container finished" podID="c2d3693c-b058-4505-a62c-7cf9e374d3d7" 
containerID="ed0daa3cb029ad38ac04f7c041663a931eccfe94b40feccc219e4774246baab3" exitCode=0 Dec 02 18:57:14 crc kubenswrapper[4792]: I1202 18:57:14.715762 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-vh4wp" event={"ID":"c2d3693c-b058-4505-a62c-7cf9e374d3d7","Type":"ContainerDied","Data":"ed0daa3cb029ad38ac04f7c041663a931eccfe94b40feccc219e4774246baab3"} Dec 02 18:57:14 crc kubenswrapper[4792]: I1202 18:57:14.717781 4792 generic.go:334] "Generic (PLEG): container finished" podID="6d35c039-6dbb-4fb5-b02e-9d8453c72dd8" containerID="c32063f114b0587b4fa057851c5bba268a42615c302c85a5776f23ac21374016" exitCode=0 Dec 02 18:57:14 crc kubenswrapper[4792]: I1202 18:57:14.717871 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-93d3-account-create-update-ptvkf" event={"ID":"6d35c039-6dbb-4fb5-b02e-9d8453c72dd8","Type":"ContainerDied","Data":"c32063f114b0587b4fa057851c5bba268a42615c302c85a5776f23ac21374016"} Dec 02 18:57:14 crc kubenswrapper[4792]: I1202 18:57:14.719292 4792 generic.go:334] "Generic (PLEG): container finished" podID="c989d282-ef5d-4644-8610-8207aae7d341" containerID="4e8ab10fa2e77565e275eca732d628d40e006b77ebb16c9ccd8499db7eaf40da" exitCode=0 Dec 02 18:57:14 crc kubenswrapper[4792]: I1202 18:57:14.719388 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-3cdb-account-create-update-p2q9g" event={"ID":"c989d282-ef5d-4644-8610-8207aae7d341","Type":"ContainerDied","Data":"4e8ab10fa2e77565e275eca732d628d40e006b77ebb16c9ccd8499db7eaf40da"} Dec 02 18:57:14 crc kubenswrapper[4792]: I1202 18:57:14.726261 4792 generic.go:334] "Generic (PLEG): container finished" podID="f9cfe4ef-8938-431b-8489-9548343fec57" containerID="d0c0c9a3d10fb01f5e05c9012c476fb9cb057b865a2cff3a4895577d9588dccd" exitCode=0 Dec 02 18:57:14 crc kubenswrapper[4792]: I1202 18:57:14.726344 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-64h7l" event={"ID":"f9cfe4ef-8938-431b-8489-9548343fec57","Type":"ContainerDied","Data":"d0c0c9a3d10fb01f5e05c9012c476fb9cb057b865a2cff3a4895577d9588dccd"} Dec 02 18:57:14 crc kubenswrapper[4792]: I1202 18:57:14.728189 4792 generic.go:334] "Generic (PLEG): container finished" podID="e62e5f1d-017c-4f71-9cd5-b02bd55a7138" containerID="a551b2c7ac84b339341ea335745d4a67bade2c1ba3bec51228c0a8dc47d67a3c" exitCode=0 Dec 02 18:57:14 crc kubenswrapper[4792]: I1202 18:57:14.728217 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-9346-account-create-update-4t87k" event={"ID":"e62e5f1d-017c-4f71-9cd5-b02bd55a7138","Type":"ContainerDied","Data":"a551b2c7ac84b339341ea335745d4a67bade2c1ba3bec51228c0a8dc47d67a3c"} Dec 02 18:57:14 crc kubenswrapper[4792]: I1202 18:57:14.728231 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-9346-account-create-update-4t87k" event={"ID":"e62e5f1d-017c-4f71-9cd5-b02bd55a7138","Type":"ContainerStarted","Data":"1cc3bfb23c603829f333395dd3ffd9f0103e56ec0d9bad57f586f24910837d3f"} Dec 02 18:57:14 crc kubenswrapper[4792]: I1202 18:57:14.887627 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cloudkitty-lokistack-ingester-0" podUID="63ad1bca-0ff4-4694-ab0a-56e8f5366d88" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 02 18:57:14 crc kubenswrapper[4792]: I1202 18:57:14.937216 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-compactor-0" Dec 02 18:57:15 crc 
Dec 02 18:57:15 crc kubenswrapper[4792]: I1202 18:57:15.739980 4792 generic.go:334] "Generic (PLEG): container finished" podID="865e7f48-168c-4b42-a6a0-308250071747" containerID="b77ba6a74a59112d6f0a5bcfb244843bac4e3b24a3931cf406958d34633b9f50" exitCode=0
Dec 02 18:57:15 crc kubenswrapper[4792]: I1202 18:57:15.740099 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-jd92w" event={"ID":"865e7f48-168c-4b42-a6a0-308250071747","Type":"ContainerDied","Data":"b77ba6a74a59112d6f0a5bcfb244843bac4e3b24a3931cf406958d34633b9f50"}
Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.245926 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-64h7l"
Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.375026 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f9cfe4ef-8938-431b-8489-9548343fec57-operator-scripts\") pod \"f9cfe4ef-8938-431b-8489-9548343fec57\" (UID: \"f9cfe4ef-8938-431b-8489-9548343fec57\") "
Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.375160 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-99b8h\" (UniqueName: \"kubernetes.io/projected/f9cfe4ef-8938-431b-8489-9548343fec57-kube-api-access-99b8h\") pod \"f9cfe4ef-8938-431b-8489-9548343fec57\" (UID: \"f9cfe4ef-8938-431b-8489-9548343fec57\") "
Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.375823 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f9cfe4ef-8938-431b-8489-9548343fec57-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f9cfe4ef-8938-431b-8489-9548343fec57" (UID: "f9cfe4ef-8938-431b-8489-9548343fec57"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.381140 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9cfe4ef-8938-431b-8489-9548343fec57-kube-api-access-99b8h" (OuterVolumeSpecName: "kube-api-access-99b8h") pod "f9cfe4ef-8938-431b-8489-9548343fec57" (UID: "f9cfe4ef-8938-431b-8489-9548343fec57"). InnerVolumeSpecName "kube-api-access-99b8h". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.432861 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-93d3-account-create-update-ptvkf"
Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.444854 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-3cdb-account-create-update-p2q9g"
Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.454669 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-gpxp2"
Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.466988 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-9346-account-create-update-4t87k"
Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.472937 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-vh4wp"
Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.479758 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-99b8h\" (UniqueName: \"kubernetes.io/projected/f9cfe4ef-8938-431b-8489-9548343fec57-kube-api-access-99b8h\") on node \"crc\" DevicePath \"\""
Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.479787 4792 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f9cfe4ef-8938-431b-8489-9548343fec57-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.580284 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fc6j8\" (UniqueName: \"kubernetes.io/projected/c989d282-ef5d-4644-8610-8207aae7d341-kube-api-access-fc6j8\") pod \"c989d282-ef5d-4644-8610-8207aae7d341\" (UID: \"c989d282-ef5d-4644-8610-8207aae7d341\") "
Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.580373 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e62e5f1d-017c-4f71-9cd5-b02bd55a7138-operator-scripts\") pod \"e62e5f1d-017c-4f71-9cd5-b02bd55a7138\" (UID: \"e62e5f1d-017c-4f71-9cd5-b02bd55a7138\") "
Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.580429 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4l6lg\" (UniqueName: \"kubernetes.io/projected/e62e5f1d-017c-4f71-9cd5-b02bd55a7138-kube-api-access-4l6lg\") pod \"e62e5f1d-017c-4f71-9cd5-b02bd55a7138\" (UID: \"e62e5f1d-017c-4f71-9cd5-b02bd55a7138\") "
Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.580483 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ad0e58c2-6db5-439e-897c-89de96dfe2f2-operator-scripts\") pod \"ad0e58c2-6db5-439e-897c-89de96dfe2f2\" (UID: \"ad0e58c2-6db5-439e-897c-89de96dfe2f2\") "
Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.580699 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-75ckv\" (UniqueName: \"kubernetes.io/projected/6d35c039-6dbb-4fb5-b02e-9d8453c72dd8-kube-api-access-75ckv\") pod \"6d35c039-6dbb-4fb5-b02e-9d8453c72dd8\" (UID: \"6d35c039-6dbb-4fb5-b02e-9d8453c72dd8\") "
Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.580810 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vtwcj\" (UniqueName: \"kubernetes.io/projected/c2d3693c-b058-4505-a62c-7cf9e374d3d7-kube-api-access-vtwcj\") pod \"c2d3693c-b058-4505-a62c-7cf9e374d3d7\" (UID: \"c2d3693c-b058-4505-a62c-7cf9e374d3d7\") "
Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.580852 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c989d282-ef5d-4644-8610-8207aae7d341-operator-scripts\") pod \"c989d282-ef5d-4644-8610-8207aae7d341\" (UID: \"c989d282-ef5d-4644-8610-8207aae7d341\") "
Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.580906 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c2d3693c-b058-4505-a62c-7cf9e374d3d7-operator-scripts\") pod \"c2d3693c-b058-4505-a62c-7cf9e374d3d7\" (UID: \"c2d3693c-b058-4505-a62c-7cf9e374d3d7\") "
Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.580935 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6d35c039-6dbb-4fb5-b02e-9d8453c72dd8-operator-scripts\") pod \"6d35c039-6dbb-4fb5-b02e-9d8453c72dd8\" (UID: \"6d35c039-6dbb-4fb5-b02e-9d8453c72dd8\") "
Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.580973 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lvclf\" (UniqueName: \"kubernetes.io/projected/ad0e58c2-6db5-439e-897c-89de96dfe2f2-kube-api-access-lvclf\") pod \"ad0e58c2-6db5-439e-897c-89de96dfe2f2\" (UID: \"ad0e58c2-6db5-439e-897c-89de96dfe2f2\") "
Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.581345 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ad0e58c2-6db5-439e-897c-89de96dfe2f2-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ad0e58c2-6db5-439e-897c-89de96dfe2f2" (UID: "ad0e58c2-6db5-439e-897c-89de96dfe2f2"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.581378 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c989d282-ef5d-4644-8610-8207aae7d341-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c989d282-ef5d-4644-8610-8207aae7d341" (UID: "c989d282-ef5d-4644-8610-8207aae7d341"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.581450 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c2d3693c-b058-4505-a62c-7cf9e374d3d7-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c2d3693c-b058-4505-a62c-7cf9e374d3d7" (UID: "c2d3693c-b058-4505-a62c-7cf9e374d3d7"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.581650 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e62e5f1d-017c-4f71-9cd5-b02bd55a7138-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e62e5f1d-017c-4f71-9cd5-b02bd55a7138" (UID: "e62e5f1d-017c-4f71-9cd5-b02bd55a7138"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.581818 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6d35c039-6dbb-4fb5-b02e-9d8453c72dd8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6d35c039-6dbb-4fb5-b02e-9d8453c72dd8" (UID: "6d35c039-6dbb-4fb5-b02e-9d8453c72dd8"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.582615 4792 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c989d282-ef5d-4644-8610-8207aae7d341-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.582689 4792 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c2d3693c-b058-4505-a62c-7cf9e374d3d7-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.582708 4792 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6d35c039-6dbb-4fb5-b02e-9d8453c72dd8-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.582765 4792 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e62e5f1d-017c-4f71-9cd5-b02bd55a7138-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.582783 4792 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ad0e58c2-6db5-439e-897c-89de96dfe2f2-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.583863 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d35c039-6dbb-4fb5-b02e-9d8453c72dd8-kube-api-access-75ckv" (OuterVolumeSpecName: "kube-api-access-75ckv") pod "6d35c039-6dbb-4fb5-b02e-9d8453c72dd8" (UID: "6d35c039-6dbb-4fb5-b02e-9d8453c72dd8"). InnerVolumeSpecName "kube-api-access-75ckv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.584254 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c989d282-ef5d-4644-8610-8207aae7d341-kube-api-access-fc6j8" (OuterVolumeSpecName: "kube-api-access-fc6j8") pod "c989d282-ef5d-4644-8610-8207aae7d341" (UID: "c989d282-ef5d-4644-8610-8207aae7d341"). InnerVolumeSpecName "kube-api-access-fc6j8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.584497 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e62e5f1d-017c-4f71-9cd5-b02bd55a7138-kube-api-access-4l6lg" (OuterVolumeSpecName: "kube-api-access-4l6lg") pod "e62e5f1d-017c-4f71-9cd5-b02bd55a7138" (UID: "e62e5f1d-017c-4f71-9cd5-b02bd55a7138"). InnerVolumeSpecName "kube-api-access-4l6lg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.585009 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad0e58c2-6db5-439e-897c-89de96dfe2f2-kube-api-access-lvclf" (OuterVolumeSpecName: "kube-api-access-lvclf") pod "ad0e58c2-6db5-439e-897c-89de96dfe2f2" (UID: "ad0e58c2-6db5-439e-897c-89de96dfe2f2"). InnerVolumeSpecName "kube-api-access-lvclf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.585242 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2d3693c-b058-4505-a62c-7cf9e374d3d7-kube-api-access-vtwcj" (OuterVolumeSpecName: "kube-api-access-vtwcj") pod "c2d3693c-b058-4505-a62c-7cf9e374d3d7" (UID: "c2d3693c-b058-4505-a62c-7cf9e374d3d7"). InnerVolumeSpecName "kube-api-access-vtwcj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.685162 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4l6lg\" (UniqueName: \"kubernetes.io/projected/e62e5f1d-017c-4f71-9cd5-b02bd55a7138-kube-api-access-4l6lg\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.685214 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-75ckv\" (UniqueName: \"kubernetes.io/projected/6d35c039-6dbb-4fb5-b02e-9d8453c72dd8-kube-api-access-75ckv\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.685234 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vtwcj\" (UniqueName: \"kubernetes.io/projected/c2d3693c-b058-4505-a62c-7cf9e374d3d7-kube-api-access-vtwcj\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.685253 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lvclf\" (UniqueName: \"kubernetes.io/projected/ad0e58c2-6db5-439e-897c-89de96dfe2f2-kube-api-access-lvclf\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.685271 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fc6j8\" (UniqueName: \"kubernetes.io/projected/c989d282-ef5d-4644-8610-8207aae7d341-kube-api-access-fc6j8\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.936729 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-3cdb-account-create-update-p2q9g" Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.936769 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-3cdb-account-create-update-p2q9g" event={"ID":"c989d282-ef5d-4644-8610-8207aae7d341","Type":"ContainerDied","Data":"fe0c21558676d814ed7d7e8ec41945a8cbeacf75c95790d0beed4f2996661715"} Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.936830 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fe0c21558676d814ed7d7e8ec41945a8cbeacf75c95790d0beed4f2996661715" Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.947315 4792 util.go:48] "No ready sandbox for pod can be found. 
Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.948467 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-64h7l" event={"ID":"f9cfe4ef-8938-431b-8489-9548343fec57","Type":"ContainerDied","Data":"fceaaffddf4d660a0d1c39cfb8e22fd48fea0f2a467b33ac159698a238a9393c"}
Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.948510 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fceaaffddf4d660a0d1c39cfb8e22fd48fea0f2a467b33ac159698a238a9393c"
Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.953933 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-9346-account-create-update-4t87k" event={"ID":"e62e5f1d-017c-4f71-9cd5-b02bd55a7138","Type":"ContainerDied","Data":"1cc3bfb23c603829f333395dd3ffd9f0103e56ec0d9bad57f586f24910837d3f"}
Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.953989 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1cc3bfb23c603829f333395dd3ffd9f0103e56ec0d9bad57f586f24910837d3f"
Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.954012 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-9346-account-create-update-4t87k"
Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.956438 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-gpxp2" event={"ID":"ad0e58c2-6db5-439e-897c-89de96dfe2f2","Type":"ContainerDied","Data":"bd5164dd741bfbbbec19cb8f8b45a6923a4e10b3c80ad08debda35962268ba7f"}
Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.956475 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bd5164dd741bfbbbec19cb8f8b45a6923a4e10b3c80ad08debda35962268ba7f"
Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.956571 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-gpxp2"
Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.960774 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-vh4wp" event={"ID":"c2d3693c-b058-4505-a62c-7cf9e374d3d7","Type":"ContainerDied","Data":"12352145618b5bb1ae89bd7080c766b07769e510e0bfb41d16f0178d185fb4bd"}
Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.960795 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="12352145618b5bb1ae89bd7080c766b07769e510e0bfb41d16f0178d185fb4bd"
Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.960939 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-vh4wp"
Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.964163 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"357feab9-6738-4c52-8478-0763a304671f","Type":"ContainerStarted","Data":"ccd641fb17e97c23203209ae4f606f566c15f572d235008d3cc4f3c3e193011d"}
Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.966500 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-93d3-account-create-update-ptvkf" event={"ID":"6d35c039-6dbb-4fb5-b02e-9d8453c72dd8","Type":"ContainerDied","Data":"74c87cdc5dca0ba22ee9a9381ff5e084a779c3f284959a384ef1b46783bd3d0a"}
Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.966557 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-93d3-account-create-update-ptvkf"
Dec 02 18:57:16 crc kubenswrapper[4792]: I1202 18:57:16.966593 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="74c87cdc5dca0ba22ee9a9381ff5e084a779c3f284959a384ef1b46783bd3d0a"
Dec 02 18:57:17 crc kubenswrapper[4792]: I1202 18:57:17.024997 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=31.9291484 podStartE2EDuration="1m13.024822265s" podCreationTimestamp="2025-12-02 18:56:04 +0000 UTC" firstStartedPulling="2025-12-02 18:56:34.683097492 +0000 UTC m=+1225.455989820" lastFinishedPulling="2025-12-02 18:57:15.778771337 +0000 UTC m=+1266.551663685" observedRunningTime="2025-12-02 18:57:17.01770065 +0000 UTC m=+1267.790592998" watchObservedRunningTime="2025-12-02 18:57:17.024822265 +0000 UTC m=+1267.797714603"
Dec 02 18:57:17 crc kubenswrapper[4792]: I1202 18:57:17.346064 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-jd92w"
Dec 02 18:57:17 crc kubenswrapper[4792]: I1202 18:57:17.500808 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/865e7f48-168c-4b42-a6a0-308250071747-combined-ca-bundle\") pod \"865e7f48-168c-4b42-a6a0-308250071747\" (UID: \"865e7f48-168c-4b42-a6a0-308250071747\") "
Dec 02 18:57:17 crc kubenswrapper[4792]: I1202 18:57:17.500922 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/865e7f48-168c-4b42-a6a0-308250071747-ring-data-devices\") pod \"865e7f48-168c-4b42-a6a0-308250071747\" (UID: \"865e7f48-168c-4b42-a6a0-308250071747\") "
Dec 02 18:57:17 crc kubenswrapper[4792]: I1202 18:57:17.501038 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zf8nj\" (UniqueName: \"kubernetes.io/projected/865e7f48-168c-4b42-a6a0-308250071747-kube-api-access-zf8nj\") pod \"865e7f48-168c-4b42-a6a0-308250071747\" (UID: \"865e7f48-168c-4b42-a6a0-308250071747\") "
Dec 02 18:57:17 crc kubenswrapper[4792]: I1202 18:57:17.501081 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/865e7f48-168c-4b42-a6a0-308250071747-swiftconf\") pod \"865e7f48-168c-4b42-a6a0-308250071747\" (UID: \"865e7f48-168c-4b42-a6a0-308250071747\") "
Dec 02 18:57:17 crc kubenswrapper[4792]: I1202 18:57:17.501129 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/865e7f48-168c-4b42-a6a0-308250071747-scripts\") pod \"865e7f48-168c-4b42-a6a0-308250071747\" (UID: \"865e7f48-168c-4b42-a6a0-308250071747\") "
Dec 02 18:57:17 crc kubenswrapper[4792]: I1202 18:57:17.501184 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/865e7f48-168c-4b42-a6a0-308250071747-etc-swift\") pod \"865e7f48-168c-4b42-a6a0-308250071747\" (UID: \"865e7f48-168c-4b42-a6a0-308250071747\") "
Dec 02 18:57:17 crc kubenswrapper[4792]: I1202 18:57:17.501298 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/865e7f48-168c-4b42-a6a0-308250071747-dispersionconf\") pod \"865e7f48-168c-4b42-a6a0-308250071747\" (UID: \"865e7f48-168c-4b42-a6a0-308250071747\") "
\"865e7f48-168c-4b42-a6a0-308250071747\") " Dec 02 18:57:17 crc kubenswrapper[4792]: I1202 18:57:17.501856 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/865e7f48-168c-4b42-a6a0-308250071747-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "865e7f48-168c-4b42-a6a0-308250071747" (UID: "865e7f48-168c-4b42-a6a0-308250071747"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:57:17 crc kubenswrapper[4792]: I1202 18:57:17.501989 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/865e7f48-168c-4b42-a6a0-308250071747-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "865e7f48-168c-4b42-a6a0-308250071747" (UID: "865e7f48-168c-4b42-a6a0-308250071747"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:57:17 crc kubenswrapper[4792]: I1202 18:57:17.520854 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/865e7f48-168c-4b42-a6a0-308250071747-kube-api-access-zf8nj" (OuterVolumeSpecName: "kube-api-access-zf8nj") pod "865e7f48-168c-4b42-a6a0-308250071747" (UID: "865e7f48-168c-4b42-a6a0-308250071747"). InnerVolumeSpecName "kube-api-access-zf8nj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:57:17 crc kubenswrapper[4792]: I1202 18:57:17.525581 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/865e7f48-168c-4b42-a6a0-308250071747-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "865e7f48-168c-4b42-a6a0-308250071747" (UID: "865e7f48-168c-4b42-a6a0-308250071747"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:57:17 crc kubenswrapper[4792]: I1202 18:57:17.554213 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/865e7f48-168c-4b42-a6a0-308250071747-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "865e7f48-168c-4b42-a6a0-308250071747" (UID: "865e7f48-168c-4b42-a6a0-308250071747"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:57:17 crc kubenswrapper[4792]: I1202 18:57:17.559673 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/865e7f48-168c-4b42-a6a0-308250071747-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "865e7f48-168c-4b42-a6a0-308250071747" (UID: "865e7f48-168c-4b42-a6a0-308250071747"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:57:17 crc kubenswrapper[4792]: I1202 18:57:17.565890 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/865e7f48-168c-4b42-a6a0-308250071747-scripts" (OuterVolumeSpecName: "scripts") pod "865e7f48-168c-4b42-a6a0-308250071747" (UID: "865e7f48-168c-4b42-a6a0-308250071747"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:57:17 crc kubenswrapper[4792]: I1202 18:57:17.604382 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/865e7f48-168c-4b42-a6a0-308250071747-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:17 crc kubenswrapper[4792]: I1202 18:57:17.604427 4792 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/865e7f48-168c-4b42-a6a0-308250071747-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:17 crc kubenswrapper[4792]: I1202 18:57:17.604448 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zf8nj\" (UniqueName: \"kubernetes.io/projected/865e7f48-168c-4b42-a6a0-308250071747-kube-api-access-zf8nj\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:17 crc kubenswrapper[4792]: I1202 18:57:17.604473 4792 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/865e7f48-168c-4b42-a6a0-308250071747-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:17 crc kubenswrapper[4792]: I1202 18:57:17.604492 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/865e7f48-168c-4b42-a6a0-308250071747-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:17 crc kubenswrapper[4792]: I1202 18:57:17.604509 4792 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/865e7f48-168c-4b42-a6a0-308250071747-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:17 crc kubenswrapper[4792]: I1202 18:57:17.604552 4792 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/865e7f48-168c-4b42-a6a0-308250071747-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:17 crc kubenswrapper[4792]: I1202 18:57:17.984675 4792 generic.go:334] "Generic (PLEG): container finished" podID="3d0661cf-534a-4951-9e56-7db65fdfd242" containerID="a62c2df38630c7ebad8e25ccbad956ac0e5658754400e4b860a8eb3f0cf36f97" exitCode=0 Dec 02 18:57:17 crc kubenswrapper[4792]: I1202 18:57:17.984815 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"3d0661cf-534a-4951-9e56-7db65fdfd242","Type":"ContainerDied","Data":"a62c2df38630c7ebad8e25ccbad956ac0e5658754400e4b860a8eb3f0cf36f97"} Dec 02 18:57:17 crc kubenswrapper[4792]: I1202 18:57:17.991931 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-jd92w" event={"ID":"865e7f48-168c-4b42-a6a0-308250071747","Type":"ContainerDied","Data":"6ef80994a24f210ecc562f17d81f994f4b249edbdefd8e78ba9f594e8fbcb20c"} Dec 02 18:57:17 crc kubenswrapper[4792]: I1202 18:57:17.991983 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6ef80994a24f210ecc562f17d81f994f4b249edbdefd8e78ba9f594e8fbcb20c" Dec 02 18:57:17 crc kubenswrapper[4792]: I1202 18:57:17.992280 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-jd92w" Dec 02 18:57:18 crc kubenswrapper[4792]: I1202 18:57:18.339716 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-wh2n2"] Dec 02 18:57:18 crc kubenswrapper[4792]: E1202 18:57:18.340149 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad0e58c2-6db5-439e-897c-89de96dfe2f2" containerName="mariadb-database-create" Dec 02 18:57:18 crc kubenswrapper[4792]: I1202 18:57:18.340177 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad0e58c2-6db5-439e-897c-89de96dfe2f2" containerName="mariadb-database-create" Dec 02 18:57:18 crc kubenswrapper[4792]: E1202 18:57:18.340197 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c989d282-ef5d-4644-8610-8207aae7d341" containerName="mariadb-account-create-update" Dec 02 18:57:18 crc kubenswrapper[4792]: I1202 18:57:18.340209 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="c989d282-ef5d-4644-8610-8207aae7d341" containerName="mariadb-account-create-update" Dec 02 18:57:18 crc kubenswrapper[4792]: E1202 18:57:18.340231 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2d3693c-b058-4505-a62c-7cf9e374d3d7" containerName="mariadb-database-create" Dec 02 18:57:18 crc kubenswrapper[4792]: I1202 18:57:18.340244 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2d3693c-b058-4505-a62c-7cf9e374d3d7" containerName="mariadb-database-create" Dec 02 18:57:18 crc kubenswrapper[4792]: E1202 18:57:18.340279 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9cfe4ef-8938-431b-8489-9548343fec57" containerName="mariadb-database-create" Dec 02 18:57:18 crc kubenswrapper[4792]: I1202 18:57:18.340291 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9cfe4ef-8938-431b-8489-9548343fec57" containerName="mariadb-database-create" Dec 02 18:57:18 crc kubenswrapper[4792]: E1202 18:57:18.340310 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="865e7f48-168c-4b42-a6a0-308250071747" containerName="swift-ring-rebalance" Dec 02 18:57:18 crc kubenswrapper[4792]: I1202 18:57:18.340322 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="865e7f48-168c-4b42-a6a0-308250071747" containerName="swift-ring-rebalance" Dec 02 18:57:18 crc kubenswrapper[4792]: E1202 18:57:18.340339 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e62e5f1d-017c-4f71-9cd5-b02bd55a7138" containerName="mariadb-account-create-update" Dec 02 18:57:18 crc kubenswrapper[4792]: I1202 18:57:18.340351 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="e62e5f1d-017c-4f71-9cd5-b02bd55a7138" containerName="mariadb-account-create-update" Dec 02 18:57:18 crc kubenswrapper[4792]: E1202 18:57:18.340378 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d35c039-6dbb-4fb5-b02e-9d8453c72dd8" containerName="mariadb-account-create-update" Dec 02 18:57:18 crc kubenswrapper[4792]: I1202 18:57:18.340388 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d35c039-6dbb-4fb5-b02e-9d8453c72dd8" containerName="mariadb-account-create-update" Dec 02 18:57:18 crc kubenswrapper[4792]: I1202 18:57:18.340742 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2d3693c-b058-4505-a62c-7cf9e374d3d7" containerName="mariadb-database-create" Dec 02 18:57:18 crc kubenswrapper[4792]: I1202 18:57:18.340775 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="c989d282-ef5d-4644-8610-8207aae7d341" 
containerName="mariadb-account-create-update" Dec 02 18:57:18 crc kubenswrapper[4792]: I1202 18:57:18.340798 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad0e58c2-6db5-439e-897c-89de96dfe2f2" containerName="mariadb-database-create" Dec 02 18:57:18 crc kubenswrapper[4792]: I1202 18:57:18.340826 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="e62e5f1d-017c-4f71-9cd5-b02bd55a7138" containerName="mariadb-account-create-update" Dec 02 18:57:18 crc kubenswrapper[4792]: I1202 18:57:18.340843 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="865e7f48-168c-4b42-a6a0-308250071747" containerName="swift-ring-rebalance" Dec 02 18:57:18 crc kubenswrapper[4792]: I1202 18:57:18.340870 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9cfe4ef-8938-431b-8489-9548343fec57" containerName="mariadb-database-create" Dec 02 18:57:18 crc kubenswrapper[4792]: I1202 18:57:18.340891 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d35c039-6dbb-4fb5-b02e-9d8453c72dd8" containerName="mariadb-account-create-update" Dec 02 18:57:18 crc kubenswrapper[4792]: I1202 18:57:18.341881 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-wh2n2" Dec 02 18:57:18 crc kubenswrapper[4792]: I1202 18:57:18.344290 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Dec 02 18:57:18 crc kubenswrapper[4792]: I1202 18:57:18.344835 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-6m7wg" Dec 02 18:57:18 crc kubenswrapper[4792]: I1202 18:57:18.384814 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-wh2n2"] Dec 02 18:57:18 crc kubenswrapper[4792]: I1202 18:57:18.425698 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/98cbdc65-cc24-4a81-899b-66de1d1a6ca3-db-sync-config-data\") pod \"glance-db-sync-wh2n2\" (UID: \"98cbdc65-cc24-4a81-899b-66de1d1a6ca3\") " pod="openstack/glance-db-sync-wh2n2" Dec 02 18:57:18 crc kubenswrapper[4792]: I1202 18:57:18.425841 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ltfvk\" (UniqueName: \"kubernetes.io/projected/98cbdc65-cc24-4a81-899b-66de1d1a6ca3-kube-api-access-ltfvk\") pod \"glance-db-sync-wh2n2\" (UID: \"98cbdc65-cc24-4a81-899b-66de1d1a6ca3\") " pod="openstack/glance-db-sync-wh2n2" Dec 02 18:57:18 crc kubenswrapper[4792]: I1202 18:57:18.426171 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98cbdc65-cc24-4a81-899b-66de1d1a6ca3-config-data\") pod \"glance-db-sync-wh2n2\" (UID: \"98cbdc65-cc24-4a81-899b-66de1d1a6ca3\") " pod="openstack/glance-db-sync-wh2n2" Dec 02 18:57:18 crc kubenswrapper[4792]: I1202 18:57:18.426321 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98cbdc65-cc24-4a81-899b-66de1d1a6ca3-combined-ca-bundle\") pod \"glance-db-sync-wh2n2\" (UID: \"98cbdc65-cc24-4a81-899b-66de1d1a6ca3\") " pod="openstack/glance-db-sync-wh2n2" Dec 02 18:57:18 crc kubenswrapper[4792]: I1202 18:57:18.528340 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/98cbdc65-cc24-4a81-899b-66de1d1a6ca3-combined-ca-bundle\") pod \"glance-db-sync-wh2n2\" (UID: \"98cbdc65-cc24-4a81-899b-66de1d1a6ca3\") " pod="openstack/glance-db-sync-wh2n2" Dec 02 18:57:18 crc kubenswrapper[4792]: I1202 18:57:18.528419 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/98cbdc65-cc24-4a81-899b-66de1d1a6ca3-db-sync-config-data\") pod \"glance-db-sync-wh2n2\" (UID: \"98cbdc65-cc24-4a81-899b-66de1d1a6ca3\") " pod="openstack/glance-db-sync-wh2n2" Dec 02 18:57:18 crc kubenswrapper[4792]: I1202 18:57:18.528455 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ltfvk\" (UniqueName: \"kubernetes.io/projected/98cbdc65-cc24-4a81-899b-66de1d1a6ca3-kube-api-access-ltfvk\") pod \"glance-db-sync-wh2n2\" (UID: \"98cbdc65-cc24-4a81-899b-66de1d1a6ca3\") " pod="openstack/glance-db-sync-wh2n2" Dec 02 18:57:18 crc kubenswrapper[4792]: I1202 18:57:18.528650 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98cbdc65-cc24-4a81-899b-66de1d1a6ca3-config-data\") pod \"glance-db-sync-wh2n2\" (UID: \"98cbdc65-cc24-4a81-899b-66de1d1a6ca3\") " pod="openstack/glance-db-sync-wh2n2" Dec 02 18:57:18 crc kubenswrapper[4792]: I1202 18:57:18.534153 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/98cbdc65-cc24-4a81-899b-66de1d1a6ca3-db-sync-config-data\") pod \"glance-db-sync-wh2n2\" (UID: \"98cbdc65-cc24-4a81-899b-66de1d1a6ca3\") " pod="openstack/glance-db-sync-wh2n2" Dec 02 18:57:18 crc kubenswrapper[4792]: I1202 18:57:18.538673 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98cbdc65-cc24-4a81-899b-66de1d1a6ca3-combined-ca-bundle\") pod \"glance-db-sync-wh2n2\" (UID: \"98cbdc65-cc24-4a81-899b-66de1d1a6ca3\") " pod="openstack/glance-db-sync-wh2n2" Dec 02 18:57:18 crc kubenswrapper[4792]: I1202 18:57:18.539091 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98cbdc65-cc24-4a81-899b-66de1d1a6ca3-config-data\") pod \"glance-db-sync-wh2n2\" (UID: \"98cbdc65-cc24-4a81-899b-66de1d1a6ca3\") " pod="openstack/glance-db-sync-wh2n2" Dec 02 18:57:18 crc kubenswrapper[4792]: I1202 18:57:18.550905 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ltfvk\" (UniqueName: \"kubernetes.io/projected/98cbdc65-cc24-4a81-899b-66de1d1a6ca3-kube-api-access-ltfvk\") pod \"glance-db-sync-wh2n2\" (UID: \"98cbdc65-cc24-4a81-899b-66de1d1a6ca3\") " pod="openstack/glance-db-sync-wh2n2" Dec 02 18:57:18 crc kubenswrapper[4792]: I1202 18:57:18.703796 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-wh2n2" Dec 02 18:57:19 crc kubenswrapper[4792]: I1202 18:57:19.017453 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"3d0661cf-534a-4951-9e56-7db65fdfd242","Type":"ContainerStarted","Data":"4ffe4ac1c8aaa9d73926bbef55548588a25905b08bce9cdf7a9fec6975c74a62"} Dec 02 18:57:19 crc kubenswrapper[4792]: I1202 18:57:19.018120 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 02 18:57:19 crc kubenswrapper[4792]: I1202 18:57:19.043231 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=39.058954768 podStartE2EDuration="1m22.043212154s" podCreationTimestamp="2025-12-02 18:55:57 +0000 UTC" firstStartedPulling="2025-12-02 18:56:00.174531235 +0000 UTC m=+1190.947423563" lastFinishedPulling="2025-12-02 18:56:43.158788581 +0000 UTC m=+1233.931680949" observedRunningTime="2025-12-02 18:57:19.038849851 +0000 UTC m=+1269.811742179" watchObservedRunningTime="2025-12-02 18:57:19.043212154 +0000 UTC m=+1269.816104492" Dec 02 18:57:19 crc kubenswrapper[4792]: I1202 18:57:19.048382 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Dec 02 18:57:19 crc kubenswrapper[4792]: I1202 18:57:19.177817 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-s44lp" podUID="2a5ad51d-6996-42c0-b156-600ff9dc7782" containerName="ovn-controller" probeResult="failure" output=< Dec 02 18:57:19 crc kubenswrapper[4792]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 02 18:57:19 crc kubenswrapper[4792]: > Dec 02 18:57:19 crc kubenswrapper[4792]: I1202 18:57:19.225196 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-gpsrm" Dec 02 18:57:19 crc kubenswrapper[4792]: I1202 18:57:19.251929 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-gpsrm" Dec 02 18:57:19 crc kubenswrapper[4792]: I1202 18:57:19.294910 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-wh2n2"] Dec 02 18:57:19 crc kubenswrapper[4792]: W1202 18:57:19.299215 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod98cbdc65_cc24_4a81_899b_66de1d1a6ca3.slice/crio-a6b26d5ae4fe9cd5c069c642eec76460db5b08c05a5250ec3b2c3788e8b1b336 WatchSource:0}: Error finding container a6b26d5ae4fe9cd5c069c642eec76460db5b08c05a5250ec3b2c3788e8b1b336: Status 404 returned error can't find the container with id a6b26d5ae4fe9cd5c069c642eec76460db5b08c05a5250ec3b2c3788e8b1b336 Dec 02 18:57:19 crc kubenswrapper[4792]: I1202 18:57:19.444142 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-s44lp-config-4mdqp"] Dec 02 18:57:19 crc kubenswrapper[4792]: I1202 18:57:19.445475 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-s44lp-config-4mdqp" Dec 02 18:57:19 crc kubenswrapper[4792]: I1202 18:57:19.447951 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 02 18:57:19 crc kubenswrapper[4792]: I1202 18:57:19.457366 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-s44lp-config-4mdqp"] Dec 02 18:57:19 crc kubenswrapper[4792]: I1202 18:57:19.545200 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/b44b9f39-dba9-44f9-980d-845b340b8922-additional-scripts\") pod \"ovn-controller-s44lp-config-4mdqp\" (UID: \"b44b9f39-dba9-44f9-980d-845b340b8922\") " pod="openstack/ovn-controller-s44lp-config-4mdqp" Dec 02 18:57:19 crc kubenswrapper[4792]: I1202 18:57:19.545253 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b44b9f39-dba9-44f9-980d-845b340b8922-var-run\") pod \"ovn-controller-s44lp-config-4mdqp\" (UID: \"b44b9f39-dba9-44f9-980d-845b340b8922\") " pod="openstack/ovn-controller-s44lp-config-4mdqp" Dec 02 18:57:19 crc kubenswrapper[4792]: I1202 18:57:19.545333 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/b44b9f39-dba9-44f9-980d-845b340b8922-var-log-ovn\") pod \"ovn-controller-s44lp-config-4mdqp\" (UID: \"b44b9f39-dba9-44f9-980d-845b340b8922\") " pod="openstack/ovn-controller-s44lp-config-4mdqp" Dec 02 18:57:19 crc kubenswrapper[4792]: I1202 18:57:19.545481 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/b44b9f39-dba9-44f9-980d-845b340b8922-var-run-ovn\") pod \"ovn-controller-s44lp-config-4mdqp\" (UID: \"b44b9f39-dba9-44f9-980d-845b340b8922\") " pod="openstack/ovn-controller-s44lp-config-4mdqp" Dec 02 18:57:19 crc kubenswrapper[4792]: I1202 18:57:19.545546 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z2cz6\" (UniqueName: \"kubernetes.io/projected/b44b9f39-dba9-44f9-980d-845b340b8922-kube-api-access-z2cz6\") pod \"ovn-controller-s44lp-config-4mdqp\" (UID: \"b44b9f39-dba9-44f9-980d-845b340b8922\") " pod="openstack/ovn-controller-s44lp-config-4mdqp" Dec 02 18:57:19 crc kubenswrapper[4792]: I1202 18:57:19.545571 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b44b9f39-dba9-44f9-980d-845b340b8922-scripts\") pod \"ovn-controller-s44lp-config-4mdqp\" (UID: \"b44b9f39-dba9-44f9-980d-845b340b8922\") " pod="openstack/ovn-controller-s44lp-config-4mdqp" Dec 02 18:57:19 crc kubenswrapper[4792]: I1202 18:57:19.647129 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/b44b9f39-dba9-44f9-980d-845b340b8922-var-log-ovn\") pod \"ovn-controller-s44lp-config-4mdqp\" (UID: \"b44b9f39-dba9-44f9-980d-845b340b8922\") " pod="openstack/ovn-controller-s44lp-config-4mdqp" Dec 02 18:57:19 crc kubenswrapper[4792]: I1202 18:57:19.647207 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/b44b9f39-dba9-44f9-980d-845b340b8922-var-run-ovn\") 
pod \"ovn-controller-s44lp-config-4mdqp\" (UID: \"b44b9f39-dba9-44f9-980d-845b340b8922\") " pod="openstack/ovn-controller-s44lp-config-4mdqp" Dec 02 18:57:19 crc kubenswrapper[4792]: I1202 18:57:19.647247 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z2cz6\" (UniqueName: \"kubernetes.io/projected/b44b9f39-dba9-44f9-980d-845b340b8922-kube-api-access-z2cz6\") pod \"ovn-controller-s44lp-config-4mdqp\" (UID: \"b44b9f39-dba9-44f9-980d-845b340b8922\") " pod="openstack/ovn-controller-s44lp-config-4mdqp" Dec 02 18:57:19 crc kubenswrapper[4792]: I1202 18:57:19.647272 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b44b9f39-dba9-44f9-980d-845b340b8922-scripts\") pod \"ovn-controller-s44lp-config-4mdqp\" (UID: \"b44b9f39-dba9-44f9-980d-845b340b8922\") " pod="openstack/ovn-controller-s44lp-config-4mdqp" Dec 02 18:57:19 crc kubenswrapper[4792]: I1202 18:57:19.647440 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/b44b9f39-dba9-44f9-980d-845b340b8922-additional-scripts\") pod \"ovn-controller-s44lp-config-4mdqp\" (UID: \"b44b9f39-dba9-44f9-980d-845b340b8922\") " pod="openstack/ovn-controller-s44lp-config-4mdqp" Dec 02 18:57:19 crc kubenswrapper[4792]: I1202 18:57:19.647469 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b44b9f39-dba9-44f9-980d-845b340b8922-var-run\") pod \"ovn-controller-s44lp-config-4mdqp\" (UID: \"b44b9f39-dba9-44f9-980d-845b340b8922\") " pod="openstack/ovn-controller-s44lp-config-4mdqp" Dec 02 18:57:19 crc kubenswrapper[4792]: I1202 18:57:19.647438 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/b44b9f39-dba9-44f9-980d-845b340b8922-var-log-ovn\") pod \"ovn-controller-s44lp-config-4mdqp\" (UID: \"b44b9f39-dba9-44f9-980d-845b340b8922\") " pod="openstack/ovn-controller-s44lp-config-4mdqp" Dec 02 18:57:19 crc kubenswrapper[4792]: I1202 18:57:19.647438 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/b44b9f39-dba9-44f9-980d-845b340b8922-var-run-ovn\") pod \"ovn-controller-s44lp-config-4mdqp\" (UID: \"b44b9f39-dba9-44f9-980d-845b340b8922\") " pod="openstack/ovn-controller-s44lp-config-4mdqp" Dec 02 18:57:19 crc kubenswrapper[4792]: I1202 18:57:19.648449 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b44b9f39-dba9-44f9-980d-845b340b8922-var-run\") pod \"ovn-controller-s44lp-config-4mdqp\" (UID: \"b44b9f39-dba9-44f9-980d-845b340b8922\") " pod="openstack/ovn-controller-s44lp-config-4mdqp" Dec 02 18:57:19 crc kubenswrapper[4792]: I1202 18:57:19.649454 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/b44b9f39-dba9-44f9-980d-845b340b8922-additional-scripts\") pod \"ovn-controller-s44lp-config-4mdqp\" (UID: \"b44b9f39-dba9-44f9-980d-845b340b8922\") " pod="openstack/ovn-controller-s44lp-config-4mdqp" Dec 02 18:57:19 crc kubenswrapper[4792]: I1202 18:57:19.650732 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b44b9f39-dba9-44f9-980d-845b340b8922-scripts\") pod \"ovn-controller-s44lp-config-4mdqp\" 
(UID: \"b44b9f39-dba9-44f9-980d-845b340b8922\") " pod="openstack/ovn-controller-s44lp-config-4mdqp" Dec 02 18:57:19 crc kubenswrapper[4792]: I1202 18:57:19.677733 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z2cz6\" (UniqueName: \"kubernetes.io/projected/b44b9f39-dba9-44f9-980d-845b340b8922-kube-api-access-z2cz6\") pod \"ovn-controller-s44lp-config-4mdqp\" (UID: \"b44b9f39-dba9-44f9-980d-845b340b8922\") " pod="openstack/ovn-controller-s44lp-config-4mdqp" Dec 02 18:57:19 crc kubenswrapper[4792]: I1202 18:57:19.791884 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-s44lp-config-4mdqp" Dec 02 18:57:19 crc kubenswrapper[4792]: E1202 18:57:19.906199 4792 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6f2d96d0_f671_46cd_8e95_162a0773470d.slice\": RecentStats: unable to find data in memory cache]" Dec 02 18:57:20 crc kubenswrapper[4792]: I1202 18:57:20.026187 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-wh2n2" event={"ID":"98cbdc65-cc24-4a81-899b-66de1d1a6ca3","Type":"ContainerStarted","Data":"a6b26d5ae4fe9cd5c069c642eec76460db5b08c05a5250ec3b2c3788e8b1b336"} Dec 02 18:57:20 crc kubenswrapper[4792]: I1202 18:57:20.262601 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-s44lp-config-4mdqp"] Dec 02 18:57:20 crc kubenswrapper[4792]: W1202 18:57:20.274888 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb44b9f39_dba9_44f9_980d_845b340b8922.slice/crio-ceb595d257b79eb5d9af6fa00efb7726002de8b522b213ff3b03854c8ab650b9 WatchSource:0}: Error finding container ceb595d257b79eb5d9af6fa00efb7726002de8b522b213ff3b03854c8ab650b9: Status 404 returned error can't find the container with id ceb595d257b79eb5d9af6fa00efb7726002de8b522b213ff3b03854c8ab650b9 Dec 02 18:57:20 crc kubenswrapper[4792]: I1202 18:57:20.941787 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Dec 02 18:57:20 crc kubenswrapper[4792]: I1202 18:57:20.942121 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Dec 02 18:57:20 crc kubenswrapper[4792]: I1202 18:57:20.944677 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Dec 02 18:57:21 crc kubenswrapper[4792]: I1202 18:57:21.037193 4792 generic.go:334] "Generic (PLEG): container finished" podID="b44b9f39-dba9-44f9-980d-845b340b8922" containerID="bcdb42e0a9fdd644353747906bb3bb9cd22fa5854c75a7389e00d2099fb21b87" exitCode=0 Dec 02 18:57:21 crc kubenswrapper[4792]: I1202 18:57:21.037302 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-s44lp-config-4mdqp" event={"ID":"b44b9f39-dba9-44f9-980d-845b340b8922","Type":"ContainerDied","Data":"bcdb42e0a9fdd644353747906bb3bb9cd22fa5854c75a7389e00d2099fb21b87"} Dec 02 18:57:21 crc kubenswrapper[4792]: I1202 18:57:21.037359 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-s44lp-config-4mdqp" event={"ID":"b44b9f39-dba9-44f9-980d-845b340b8922","Type":"ContainerStarted","Data":"ceb595d257b79eb5d9af6fa00efb7726002de8b522b213ff3b03854c8ab650b9"} Dec 02 18:57:21 crc kubenswrapper[4792]: I1202 18:57:21.039472 4792 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Dec 02 18:57:22 crc kubenswrapper[4792]: I1202 18:57:22.498488 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-s44lp-config-4mdqp" Dec 02 18:57:22 crc kubenswrapper[4792]: I1202 18:57:22.625163 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b44b9f39-dba9-44f9-980d-845b340b8922-var-run\") pod \"b44b9f39-dba9-44f9-980d-845b340b8922\" (UID: \"b44b9f39-dba9-44f9-980d-845b340b8922\") " Dec 02 18:57:22 crc kubenswrapper[4792]: I1202 18:57:22.625209 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/b44b9f39-dba9-44f9-980d-845b340b8922-var-run-ovn\") pod \"b44b9f39-dba9-44f9-980d-845b340b8922\" (UID: \"b44b9f39-dba9-44f9-980d-845b340b8922\") " Dec 02 18:57:22 crc kubenswrapper[4792]: I1202 18:57:22.625263 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/b44b9f39-dba9-44f9-980d-845b340b8922-additional-scripts\") pod \"b44b9f39-dba9-44f9-980d-845b340b8922\" (UID: \"b44b9f39-dba9-44f9-980d-845b340b8922\") " Dec 02 18:57:22 crc kubenswrapper[4792]: I1202 18:57:22.625239 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b44b9f39-dba9-44f9-980d-845b340b8922-var-run" (OuterVolumeSpecName: "var-run") pod "b44b9f39-dba9-44f9-980d-845b340b8922" (UID: "b44b9f39-dba9-44f9-980d-845b340b8922"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 18:57:22 crc kubenswrapper[4792]: I1202 18:57:22.625327 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/b44b9f39-dba9-44f9-980d-845b340b8922-var-log-ovn\") pod \"b44b9f39-dba9-44f9-980d-845b340b8922\" (UID: \"b44b9f39-dba9-44f9-980d-845b340b8922\") " Dec 02 18:57:22 crc kubenswrapper[4792]: I1202 18:57:22.625339 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b44b9f39-dba9-44f9-980d-845b340b8922-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "b44b9f39-dba9-44f9-980d-845b340b8922" (UID: "b44b9f39-dba9-44f9-980d-845b340b8922"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 18:57:22 crc kubenswrapper[4792]: I1202 18:57:22.625366 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b44b9f39-dba9-44f9-980d-845b340b8922-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "b44b9f39-dba9-44f9-980d-845b340b8922" (UID: "b44b9f39-dba9-44f9-980d-845b340b8922"). InnerVolumeSpecName "var-log-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 18:57:22 crc kubenswrapper[4792]: I1202 18:57:22.625444 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b44b9f39-dba9-44f9-980d-845b340b8922-scripts\") pod \"b44b9f39-dba9-44f9-980d-845b340b8922\" (UID: \"b44b9f39-dba9-44f9-980d-845b340b8922\") " Dec 02 18:57:22 crc kubenswrapper[4792]: I1202 18:57:22.625498 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z2cz6\" (UniqueName: \"kubernetes.io/projected/b44b9f39-dba9-44f9-980d-845b340b8922-kube-api-access-z2cz6\") pod \"b44b9f39-dba9-44f9-980d-845b340b8922\" (UID: \"b44b9f39-dba9-44f9-980d-845b340b8922\") " Dec 02 18:57:22 crc kubenswrapper[4792]: I1202 18:57:22.626583 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b44b9f39-dba9-44f9-980d-845b340b8922-scripts" (OuterVolumeSpecName: "scripts") pod "b44b9f39-dba9-44f9-980d-845b340b8922" (UID: "b44b9f39-dba9-44f9-980d-845b340b8922"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:57:22 crc kubenswrapper[4792]: I1202 18:57:22.626597 4792 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/b44b9f39-dba9-44f9-980d-845b340b8922-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:22 crc kubenswrapper[4792]: I1202 18:57:22.626824 4792 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b44b9f39-dba9-44f9-980d-845b340b8922-var-run\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:22 crc kubenswrapper[4792]: I1202 18:57:22.626835 4792 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/b44b9f39-dba9-44f9-980d-845b340b8922-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:22 crc kubenswrapper[4792]: I1202 18:57:22.627202 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b44b9f39-dba9-44f9-980d-845b340b8922-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "b44b9f39-dba9-44f9-980d-845b340b8922" (UID: "b44b9f39-dba9-44f9-980d-845b340b8922"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:57:22 crc kubenswrapper[4792]: I1202 18:57:22.634819 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b44b9f39-dba9-44f9-980d-845b340b8922-kube-api-access-z2cz6" (OuterVolumeSpecName: "kube-api-access-z2cz6") pod "b44b9f39-dba9-44f9-980d-845b340b8922" (UID: "b44b9f39-dba9-44f9-980d-845b340b8922"). InnerVolumeSpecName "kube-api-access-z2cz6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:57:22 crc kubenswrapper[4792]: I1202 18:57:22.728441 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b44b9f39-dba9-44f9-980d-845b340b8922-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:22 crc kubenswrapper[4792]: I1202 18:57:22.728477 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z2cz6\" (UniqueName: \"kubernetes.io/projected/b44b9f39-dba9-44f9-980d-845b340b8922-kube-api-access-z2cz6\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:22 crc kubenswrapper[4792]: I1202 18:57:22.728494 4792 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/b44b9f39-dba9-44f9-980d-845b340b8922-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:23 crc kubenswrapper[4792]: I1202 18:57:23.060203 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-s44lp-config-4mdqp" event={"ID":"b44b9f39-dba9-44f9-980d-845b340b8922","Type":"ContainerDied","Data":"ceb595d257b79eb5d9af6fa00efb7726002de8b522b213ff3b03854c8ab650b9"} Dec 02 18:57:23 crc kubenswrapper[4792]: I1202 18:57:23.060243 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ceb595d257b79eb5d9af6fa00efb7726002de8b522b213ff3b03854c8ab650b9" Dec 02 18:57:23 crc kubenswrapper[4792]: I1202 18:57:23.060249 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-s44lp-config-4mdqp" Dec 02 18:57:23 crc kubenswrapper[4792]: I1202 18:57:23.602469 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-s44lp-config-4mdqp"] Dec 02 18:57:23 crc kubenswrapper[4792]: I1202 18:57:23.611933 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-s44lp-config-4mdqp"] Dec 02 18:57:23 crc kubenswrapper[4792]: I1202 18:57:23.731035 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-s44lp-config-j6nlx"] Dec 02 18:57:23 crc kubenswrapper[4792]: E1202 18:57:23.731460 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b44b9f39-dba9-44f9-980d-845b340b8922" containerName="ovn-config" Dec 02 18:57:23 crc kubenswrapper[4792]: I1202 18:57:23.731482 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="b44b9f39-dba9-44f9-980d-845b340b8922" containerName="ovn-config" Dec 02 18:57:23 crc kubenswrapper[4792]: I1202 18:57:23.731726 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="b44b9f39-dba9-44f9-980d-845b340b8922" containerName="ovn-config" Dec 02 18:57:23 crc kubenswrapper[4792]: I1202 18:57:23.732485 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-s44lp-config-j6nlx" Dec 02 18:57:23 crc kubenswrapper[4792]: I1202 18:57:23.734285 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 02 18:57:23 crc kubenswrapper[4792]: I1202 18:57:23.748432 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-s44lp-config-j6nlx"] Dec 02 18:57:23 crc kubenswrapper[4792]: I1202 18:57:23.850738 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/807aae1e-88ef-4966-8717-aae7b6f0f2ee-var-log-ovn\") pod \"ovn-controller-s44lp-config-j6nlx\" (UID: \"807aae1e-88ef-4966-8717-aae7b6f0f2ee\") " pod="openstack/ovn-controller-s44lp-config-j6nlx" Dec 02 18:57:23 crc kubenswrapper[4792]: I1202 18:57:23.850790 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/807aae1e-88ef-4966-8717-aae7b6f0f2ee-additional-scripts\") pod \"ovn-controller-s44lp-config-j6nlx\" (UID: \"807aae1e-88ef-4966-8717-aae7b6f0f2ee\") " pod="openstack/ovn-controller-s44lp-config-j6nlx" Dec 02 18:57:23 crc kubenswrapper[4792]: I1202 18:57:23.850818 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/807aae1e-88ef-4966-8717-aae7b6f0f2ee-var-run-ovn\") pod \"ovn-controller-s44lp-config-j6nlx\" (UID: \"807aae1e-88ef-4966-8717-aae7b6f0f2ee\") " pod="openstack/ovn-controller-s44lp-config-j6nlx" Dec 02 18:57:23 crc kubenswrapper[4792]: I1202 18:57:23.850931 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/807aae1e-88ef-4966-8717-aae7b6f0f2ee-scripts\") pod \"ovn-controller-s44lp-config-j6nlx\" (UID: \"807aae1e-88ef-4966-8717-aae7b6f0f2ee\") " pod="openstack/ovn-controller-s44lp-config-j6nlx" Dec 02 18:57:23 crc kubenswrapper[4792]: I1202 18:57:23.851001 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/807aae1e-88ef-4966-8717-aae7b6f0f2ee-var-run\") pod \"ovn-controller-s44lp-config-j6nlx\" (UID: \"807aae1e-88ef-4966-8717-aae7b6f0f2ee\") " pod="openstack/ovn-controller-s44lp-config-j6nlx" Dec 02 18:57:23 crc kubenswrapper[4792]: I1202 18:57:23.851039 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-68phr\" (UniqueName: \"kubernetes.io/projected/807aae1e-88ef-4966-8717-aae7b6f0f2ee-kube-api-access-68phr\") pod \"ovn-controller-s44lp-config-j6nlx\" (UID: \"807aae1e-88ef-4966-8717-aae7b6f0f2ee\") " pod="openstack/ovn-controller-s44lp-config-j6nlx" Dec 02 18:57:23 crc kubenswrapper[4792]: I1202 18:57:23.952221 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/807aae1e-88ef-4966-8717-aae7b6f0f2ee-scripts\") pod \"ovn-controller-s44lp-config-j6nlx\" (UID: \"807aae1e-88ef-4966-8717-aae7b6f0f2ee\") " pod="openstack/ovn-controller-s44lp-config-j6nlx" Dec 02 18:57:23 crc kubenswrapper[4792]: I1202 18:57:23.952311 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/807aae1e-88ef-4966-8717-aae7b6f0f2ee-var-run\") pod 
\"ovn-controller-s44lp-config-j6nlx\" (UID: \"807aae1e-88ef-4966-8717-aae7b6f0f2ee\") " pod="openstack/ovn-controller-s44lp-config-j6nlx" Dec 02 18:57:23 crc kubenswrapper[4792]: I1202 18:57:23.952350 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-68phr\" (UniqueName: \"kubernetes.io/projected/807aae1e-88ef-4966-8717-aae7b6f0f2ee-kube-api-access-68phr\") pod \"ovn-controller-s44lp-config-j6nlx\" (UID: \"807aae1e-88ef-4966-8717-aae7b6f0f2ee\") " pod="openstack/ovn-controller-s44lp-config-j6nlx" Dec 02 18:57:23 crc kubenswrapper[4792]: I1202 18:57:23.952457 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/807aae1e-88ef-4966-8717-aae7b6f0f2ee-var-log-ovn\") pod \"ovn-controller-s44lp-config-j6nlx\" (UID: \"807aae1e-88ef-4966-8717-aae7b6f0f2ee\") " pod="openstack/ovn-controller-s44lp-config-j6nlx" Dec 02 18:57:23 crc kubenswrapper[4792]: I1202 18:57:23.952571 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/807aae1e-88ef-4966-8717-aae7b6f0f2ee-additional-scripts\") pod \"ovn-controller-s44lp-config-j6nlx\" (UID: \"807aae1e-88ef-4966-8717-aae7b6f0f2ee\") " pod="openstack/ovn-controller-s44lp-config-j6nlx" Dec 02 18:57:23 crc kubenswrapper[4792]: I1202 18:57:23.952603 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/807aae1e-88ef-4966-8717-aae7b6f0f2ee-var-run-ovn\") pod \"ovn-controller-s44lp-config-j6nlx\" (UID: \"807aae1e-88ef-4966-8717-aae7b6f0f2ee\") " pod="openstack/ovn-controller-s44lp-config-j6nlx" Dec 02 18:57:23 crc kubenswrapper[4792]: I1202 18:57:23.952724 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/807aae1e-88ef-4966-8717-aae7b6f0f2ee-var-log-ovn\") pod \"ovn-controller-s44lp-config-j6nlx\" (UID: \"807aae1e-88ef-4966-8717-aae7b6f0f2ee\") " pod="openstack/ovn-controller-s44lp-config-j6nlx" Dec 02 18:57:23 crc kubenswrapper[4792]: I1202 18:57:23.952748 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/807aae1e-88ef-4966-8717-aae7b6f0f2ee-var-run-ovn\") pod \"ovn-controller-s44lp-config-j6nlx\" (UID: \"807aae1e-88ef-4966-8717-aae7b6f0f2ee\") " pod="openstack/ovn-controller-s44lp-config-j6nlx" Dec 02 18:57:23 crc kubenswrapper[4792]: I1202 18:57:23.953394 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/807aae1e-88ef-4966-8717-aae7b6f0f2ee-additional-scripts\") pod \"ovn-controller-s44lp-config-j6nlx\" (UID: \"807aae1e-88ef-4966-8717-aae7b6f0f2ee\") " pod="openstack/ovn-controller-s44lp-config-j6nlx" Dec 02 18:57:23 crc kubenswrapper[4792]: I1202 18:57:23.953468 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/807aae1e-88ef-4966-8717-aae7b6f0f2ee-var-run\") pod \"ovn-controller-s44lp-config-j6nlx\" (UID: \"807aae1e-88ef-4966-8717-aae7b6f0f2ee\") " pod="openstack/ovn-controller-s44lp-config-j6nlx" Dec 02 18:57:23 crc kubenswrapper[4792]: I1202 18:57:23.954707 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/807aae1e-88ef-4966-8717-aae7b6f0f2ee-scripts\") pod 
\"ovn-controller-s44lp-config-j6nlx\" (UID: \"807aae1e-88ef-4966-8717-aae7b6f0f2ee\") " pod="openstack/ovn-controller-s44lp-config-j6nlx" Dec 02 18:57:23 crc kubenswrapper[4792]: I1202 18:57:23.971568 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-68phr\" (UniqueName: \"kubernetes.io/projected/807aae1e-88ef-4966-8717-aae7b6f0f2ee-kube-api-access-68phr\") pod \"ovn-controller-s44lp-config-j6nlx\" (UID: \"807aae1e-88ef-4966-8717-aae7b6f0f2ee\") " pod="openstack/ovn-controller-s44lp-config-j6nlx" Dec 02 18:57:24 crc kubenswrapper[4792]: I1202 18:57:24.046174 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-s44lp-config-j6nlx" Dec 02 18:57:24 crc kubenswrapper[4792]: I1202 18:57:24.082320 4792 generic.go:334] "Generic (PLEG): container finished" podID="c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba" containerID="970ed1a9e9aefd99aafb9cf861569ab2b8f1f85a7d8d627a4cba42af2fd1adf8" exitCode=0 Dec 02 18:57:24 crc kubenswrapper[4792]: I1202 18:57:24.082376 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba","Type":"ContainerDied","Data":"970ed1a9e9aefd99aafb9cf861569ab2b8f1f85a7d8d627a4cba42af2fd1adf8"} Dec 02 18:57:24 crc kubenswrapper[4792]: I1202 18:57:24.205862 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-s44lp" Dec 02 18:57:24 crc kubenswrapper[4792]: I1202 18:57:24.497246 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 02 18:57:24 crc kubenswrapper[4792]: I1202 18:57:24.497503 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="357feab9-6738-4c52-8478-0763a304671f" containerName="prometheus" containerID="cri-o://3f54a3b0040179d09f172163420f3935de7f9f98ccbfb59dedbb235ee6139d42" gracePeriod=600 Dec 02 18:57:24 crc kubenswrapper[4792]: I1202 18:57:24.497607 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="357feab9-6738-4c52-8478-0763a304671f" containerName="thanos-sidecar" containerID="cri-o://ccd641fb17e97c23203209ae4f606f566c15f572d235008d3cc4f3c3e193011d" gracePeriod=600 Dec 02 18:57:24 crc kubenswrapper[4792]: I1202 18:57:24.497628 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="357feab9-6738-4c52-8478-0763a304671f" containerName="config-reloader" containerID="cri-o://468bb6ff5c8c9b2fc5ac5d0d48cc56d770bd6b3c4aeb66e472caf0ca853b2128" gracePeriod=600 Dec 02 18:57:24 crc kubenswrapper[4792]: I1202 18:57:24.569822 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-s44lp-config-j6nlx"] Dec 02 18:57:24 crc kubenswrapper[4792]: W1202 18:57:24.570990 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod807aae1e_88ef_4966_8717_aae7b6f0f2ee.slice/crio-161d0362f4d8a25d7b722635d8f06217798d3015002037a553d506a5ae6443b0 WatchSource:0}: Error finding container 161d0362f4d8a25d7b722635d8f06217798d3015002037a553d506a5ae6443b0: Status 404 returned error can't find the container with id 161d0362f4d8a25d7b722635d8f06217798d3015002037a553d506a5ae6443b0 Dec 02 18:57:24 crc kubenswrapper[4792]: I1202 18:57:24.898689 4792 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openstack/cloudkitty-lokistack-ingester-0" podUID="63ad1bca-0ff4-4694-ab0a-56e8f5366d88" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 02 18:57:25 crc kubenswrapper[4792]: I1202 18:57:25.093883 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-s44lp-config-j6nlx" event={"ID":"807aae1e-88ef-4966-8717-aae7b6f0f2ee","Type":"ContainerStarted","Data":"161d0362f4d8a25d7b722635d8f06217798d3015002037a553d506a5ae6443b0"} Dec 02 18:57:25 crc kubenswrapper[4792]: I1202 18:57:25.099834 4792 generic.go:334] "Generic (PLEG): container finished" podID="357feab9-6738-4c52-8478-0763a304671f" containerID="ccd641fb17e97c23203209ae4f606f566c15f572d235008d3cc4f3c3e193011d" exitCode=0 Dec 02 18:57:25 crc kubenswrapper[4792]: I1202 18:57:25.099868 4792 generic.go:334] "Generic (PLEG): container finished" podID="357feab9-6738-4c52-8478-0763a304671f" containerID="468bb6ff5c8c9b2fc5ac5d0d48cc56d770bd6b3c4aeb66e472caf0ca853b2128" exitCode=0 Dec 02 18:57:25 crc kubenswrapper[4792]: I1202 18:57:25.099881 4792 generic.go:334] "Generic (PLEG): container finished" podID="357feab9-6738-4c52-8478-0763a304671f" containerID="3f54a3b0040179d09f172163420f3935de7f9f98ccbfb59dedbb235ee6139d42" exitCode=0 Dec 02 18:57:25 crc kubenswrapper[4792]: I1202 18:57:25.099898 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"357feab9-6738-4c52-8478-0763a304671f","Type":"ContainerDied","Data":"ccd641fb17e97c23203209ae4f606f566c15f572d235008d3cc4f3c3e193011d"} Dec 02 18:57:25 crc kubenswrapper[4792]: I1202 18:57:25.099919 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"357feab9-6738-4c52-8478-0763a304671f","Type":"ContainerDied","Data":"468bb6ff5c8c9b2fc5ac5d0d48cc56d770bd6b3c4aeb66e472caf0ca853b2128"} Dec 02 18:57:25 crc kubenswrapper[4792]: I1202 18:57:25.099931 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"357feab9-6738-4c52-8478-0763a304671f","Type":"ContainerDied","Data":"3f54a3b0040179d09f172163420f3935de7f9f98ccbfb59dedbb235ee6139d42"} Dec 02 18:57:25 crc kubenswrapper[4792]: I1202 18:57:25.555960 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b44b9f39-dba9-44f9-980d-845b340b8922" path="/var/lib/kubelet/pods/b44b9f39-dba9-44f9-980d-845b340b8922/volumes" Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.068503 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.129629 4792 generic.go:334] "Generic (PLEG): container finished" podID="807aae1e-88ef-4966-8717-aae7b6f0f2ee" containerID="28a6e102bd8aca4f2435f803b9316007e7b6db5356fc8621db7b90eb4d633390" exitCode=0 Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.129747 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-s44lp-config-j6nlx" event={"ID":"807aae1e-88ef-4966-8717-aae7b6f0f2ee","Type":"ContainerDied","Data":"28a6e102bd8aca4f2435f803b9316007e7b6db5356fc8621db7b90eb4d633390"} Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.132055 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba","Type":"ContainerStarted","Data":"2b24c929b322faa198f40aeb0652d002a7c1fa7fe0d8ade0379bccd9e518bbcb"} Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.132628 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.134906 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.134925 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"357feab9-6738-4c52-8478-0763a304671f","Type":"ContainerDied","Data":"8d9ecf63bc9aa656be629a802e625b9ef643962e0fba09e89cfae70926543921"} Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.135262 4792 scope.go:117] "RemoveContainer" containerID="ccd641fb17e97c23203209ae4f606f566c15f572d235008d3cc4f3c3e193011d" Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.187349 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=-9223371947.667446 podStartE2EDuration="1m29.187328931s" podCreationTimestamp="2025-12-02 18:55:57 +0000 UTC" firstStartedPulling="2025-12-02 18:55:59.826249284 +0000 UTC m=+1190.599141612" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:57:26.1803413 +0000 UTC m=+1276.953233628" watchObservedRunningTime="2025-12-02 18:57:26.187328931 +0000 UTC m=+1276.960221259" Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.233475 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-26gl8\" (UniqueName: \"kubernetes.io/projected/357feab9-6738-4c52-8478-0763a304671f-kube-api-access-26gl8\") pod \"357feab9-6738-4c52-8478-0763a304671f\" (UID: \"357feab9-6738-4c52-8478-0763a304671f\") " Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.233903 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/357feab9-6738-4c52-8478-0763a304671f-tls-assets\") pod \"357feab9-6738-4c52-8478-0763a304671f\" (UID: \"357feab9-6738-4c52-8478-0763a304671f\") " Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.233998 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/357feab9-6738-4c52-8478-0763a304671f-config\") pod \"357feab9-6738-4c52-8478-0763a304671f\" (UID: \"357feab9-6738-4c52-8478-0763a304671f\") " Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.234089 4792 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/357feab9-6738-4c52-8478-0763a304671f-config-out\") pod \"357feab9-6738-4c52-8478-0763a304671f\" (UID: \"357feab9-6738-4c52-8478-0763a304671f\") " Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.234226 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f0561b14-c8c2-41be-a503-99f9bcbd40cd\") pod \"357feab9-6738-4c52-8478-0763a304671f\" (UID: \"357feab9-6738-4c52-8478-0763a304671f\") " Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.234262 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/357feab9-6738-4c52-8478-0763a304671f-prometheus-metric-storage-rulefiles-0\") pod \"357feab9-6738-4c52-8478-0763a304671f\" (UID: \"357feab9-6738-4c52-8478-0763a304671f\") " Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.234290 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/357feab9-6738-4c52-8478-0763a304671f-web-config\") pod \"357feab9-6738-4c52-8478-0763a304671f\" (UID: \"357feab9-6738-4c52-8478-0763a304671f\") " Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.234313 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/357feab9-6738-4c52-8478-0763a304671f-thanos-prometheus-http-client-file\") pod \"357feab9-6738-4c52-8478-0763a304671f\" (UID: \"357feab9-6738-4c52-8478-0763a304671f\") " Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.235493 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/357feab9-6738-4c52-8478-0763a304671f-prometheus-metric-storage-rulefiles-0" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-0") pod "357feab9-6738-4c52-8478-0763a304671f" (UID: "357feab9-6738-4c52-8478-0763a304671f"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.241658 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/357feab9-6738-4c52-8478-0763a304671f-thanos-prometheus-http-client-file" (OuterVolumeSpecName: "thanos-prometheus-http-client-file") pod "357feab9-6738-4c52-8478-0763a304671f" (UID: "357feab9-6738-4c52-8478-0763a304671f"). InnerVolumeSpecName "thanos-prometheus-http-client-file". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.252712 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/357feab9-6738-4c52-8478-0763a304671f-tls-assets" (OuterVolumeSpecName: "tls-assets") pod "357feab9-6738-4c52-8478-0763a304671f" (UID: "357feab9-6738-4c52-8478-0763a304671f"). InnerVolumeSpecName "tls-assets". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.258179 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/357feab9-6738-4c52-8478-0763a304671f-config" (OuterVolumeSpecName: "config") pod "357feab9-6738-4c52-8478-0763a304671f" (UID: "357feab9-6738-4c52-8478-0763a304671f"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.258319 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/357feab9-6738-4c52-8478-0763a304671f-config-out" (OuterVolumeSpecName: "config-out") pod "357feab9-6738-4c52-8478-0763a304671f" (UID: "357feab9-6738-4c52-8478-0763a304671f"). InnerVolumeSpecName "config-out". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.265152 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/357feab9-6738-4c52-8478-0763a304671f-kube-api-access-26gl8" (OuterVolumeSpecName: "kube-api-access-26gl8") pod "357feab9-6738-4c52-8478-0763a304671f" (UID: "357feab9-6738-4c52-8478-0763a304671f"). InnerVolumeSpecName "kube-api-access-26gl8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.279325 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/357feab9-6738-4c52-8478-0763a304671f-web-config" (OuterVolumeSpecName: "web-config") pod "357feab9-6738-4c52-8478-0763a304671f" (UID: "357feab9-6738-4c52-8478-0763a304671f"). InnerVolumeSpecName "web-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.298479 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f0561b14-c8c2-41be-a503-99f9bcbd40cd" (OuterVolumeSpecName: "prometheus-metric-storage-db") pod "357feab9-6738-4c52-8478-0763a304671f" (UID: "357feab9-6738-4c52-8478-0763a304671f"). InnerVolumeSpecName "pvc-f0561b14-c8c2-41be-a503-99f9bcbd40cd". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.337627 4792 reconciler_common.go:293] "Volume detached for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/357feab9-6738-4c52-8478-0763a304671f-config-out\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.337681 4792 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-f0561b14-c8c2-41be-a503-99f9bcbd40cd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f0561b14-c8c2-41be-a503-99f9bcbd40cd\") on node \"crc\" " Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.337697 4792 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/357feab9-6738-4c52-8478-0763a304671f-prometheus-metric-storage-rulefiles-0\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.337711 4792 reconciler_common.go:293] "Volume detached for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/357feab9-6738-4c52-8478-0763a304671f-web-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.337722 4792 reconciler_common.go:293] "Volume detached for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/357feab9-6738-4c52-8478-0763a304671f-thanos-prometheus-http-client-file\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.337731 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-26gl8\" (UniqueName: \"kubernetes.io/projected/357feab9-6738-4c52-8478-0763a304671f-kube-api-access-26gl8\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.337742 4792 reconciler_common.go:293] "Volume detached for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/357feab9-6738-4c52-8478-0763a304671f-tls-assets\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.337753 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/357feab9-6738-4c52-8478-0763a304671f-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.357849 4792 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.358005 4792 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-f0561b14-c8c2-41be-a503-99f9bcbd40cd" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f0561b14-c8c2-41be-a503-99f9bcbd40cd") on node "crc"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.439004 4792 reconciler_common.go:293] "Volume detached for volume \"pvc-f0561b14-c8c2-41be-a503-99f9bcbd40cd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f0561b14-c8c2-41be-a503-99f9bcbd40cd\") on node \"crc\" DevicePath \"\""
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.465629 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.478867 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.515365 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 02 18:57:26 crc kubenswrapper[4792]: E1202 18:57:26.516000 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="357feab9-6738-4c52-8478-0763a304671f" containerName="config-reloader"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.516037 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="357feab9-6738-4c52-8478-0763a304671f" containerName="config-reloader"
Dec 02 18:57:26 crc kubenswrapper[4792]: E1202 18:57:26.516057 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="357feab9-6738-4c52-8478-0763a304671f" containerName="thanos-sidecar"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.516063 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="357feab9-6738-4c52-8478-0763a304671f" containerName="thanos-sidecar"
Dec 02 18:57:26 crc kubenswrapper[4792]: E1202 18:57:26.516078 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="357feab9-6738-4c52-8478-0763a304671f" containerName="prometheus"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.516084 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="357feab9-6738-4c52-8478-0763a304671f" containerName="prometheus"
Dec 02 18:57:26 crc kubenswrapper[4792]: E1202 18:57:26.516117 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="357feab9-6738-4c52-8478-0763a304671f" containerName="init-config-reloader"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.516123 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="357feab9-6738-4c52-8478-0763a304671f" containerName="init-config-reloader"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.516377 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="357feab9-6738-4c52-8478-0763a304671f" containerName="thanos-sidecar"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.516418 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="357feab9-6738-4c52-8478-0763a304671f" containerName="config-reloader"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.516429 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="357feab9-6738-4c52-8478-0763a304671f" containerName="prometheus"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.521128 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.524487 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-metric-storage-prometheus-svc"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.524644 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.525266 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.525574 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-db9tf"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.527746 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.527779 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.541829 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.541970 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.644539 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/5d36d473-c89a-496a-ab27-d65535edb0ec-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"5d36d473-c89a-496a-ab27-d65535edb0ec\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.644865 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/5d36d473-c89a-496a-ab27-d65535edb0ec-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"5d36d473-c89a-496a-ab27-d65535edb0ec\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.644918 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/5d36d473-c89a-496a-ab27-d65535edb0ec-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"5d36d473-c89a-496a-ab27-d65535edb0ec\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.644976 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/5d36d473-c89a-496a-ab27-d65535edb0ec-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"5d36d473-c89a-496a-ab27-d65535edb0ec\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.645012 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-f0561b14-c8c2-41be-a503-99f9bcbd40cd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f0561b14-c8c2-41be-a503-99f9bcbd40cd\") pod \"prometheus-metric-storage-0\" (UID: \"5d36d473-c89a-496a-ab27-d65535edb0ec\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.645052 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/5d36d473-c89a-496a-ab27-d65535edb0ec-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"5d36d473-c89a-496a-ab27-d65535edb0ec\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.645068 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/5d36d473-c89a-496a-ab27-d65535edb0ec-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"5d36d473-c89a-496a-ab27-d65535edb0ec\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.645090 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-btk4l\" (UniqueName: \"kubernetes.io/projected/5d36d473-c89a-496a-ab27-d65535edb0ec-kube-api-access-btk4l\") pod \"prometheus-metric-storage-0\" (UID: \"5d36d473-c89a-496a-ab27-d65535edb0ec\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.645126 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d36d473-c89a-496a-ab27-d65535edb0ec-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"5d36d473-c89a-496a-ab27-d65535edb0ec\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.645143 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/5d36d473-c89a-496a-ab27-d65535edb0ec-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"5d36d473-c89a-496a-ab27-d65535edb0ec\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.645162 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/5d36d473-c89a-496a-ab27-d65535edb0ec-config\") pod \"prometheus-metric-storage-0\" (UID: \"5d36d473-c89a-496a-ab27-d65535edb0ec\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.748410 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d36d473-c89a-496a-ab27-d65535edb0ec-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"5d36d473-c89a-496a-ab27-d65535edb0ec\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.748475 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/5d36d473-c89a-496a-ab27-d65535edb0ec-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"5d36d473-c89a-496a-ab27-d65535edb0ec\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.748494 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/5d36d473-c89a-496a-ab27-d65535edb0ec-config\") pod \"prometheus-metric-storage-0\" (UID: \"5d36d473-c89a-496a-ab27-d65535edb0ec\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.748558 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/5d36d473-c89a-496a-ab27-d65535edb0ec-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"5d36d473-c89a-496a-ab27-d65535edb0ec\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.748598 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/5d36d473-c89a-496a-ab27-d65535edb0ec-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"5d36d473-c89a-496a-ab27-d65535edb0ec\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.748637 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/5d36d473-c89a-496a-ab27-d65535edb0ec-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"5d36d473-c89a-496a-ab27-d65535edb0ec\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.748685 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/5d36d473-c89a-496a-ab27-d65535edb0ec-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"5d36d473-c89a-496a-ab27-d65535edb0ec\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.748715 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-f0561b14-c8c2-41be-a503-99f9bcbd40cd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f0561b14-c8c2-41be-a503-99f9bcbd40cd\") pod \"prometheus-metric-storage-0\" (UID: \"5d36d473-c89a-496a-ab27-d65535edb0ec\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.748754 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/5d36d473-c89a-496a-ab27-d65535edb0ec-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"5d36d473-c89a-496a-ab27-d65535edb0ec\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.748773 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/5d36d473-c89a-496a-ab27-d65535edb0ec-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"5d36d473-c89a-496a-ab27-d65535edb0ec\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.748792 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-btk4l\" (UniqueName: \"kubernetes.io/projected/5d36d473-c89a-496a-ab27-d65535edb0ec-kube-api-access-btk4l\") pod \"prometheus-metric-storage-0\" (UID: \"5d36d473-c89a-496a-ab27-d65535edb0ec\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.753199 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/5d36d473-c89a-496a-ab27-d65535edb0ec-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"5d36d473-c89a-496a-ab27-d65535edb0ec\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.754105 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/5d36d473-c89a-496a-ab27-d65535edb0ec-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"5d36d473-c89a-496a-ab27-d65535edb0ec\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.755282 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/5d36d473-c89a-496a-ab27-d65535edb0ec-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"5d36d473-c89a-496a-ab27-d65535edb0ec\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.759013 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d36d473-c89a-496a-ab27-d65535edb0ec-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"5d36d473-c89a-496a-ab27-d65535edb0ec\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.759684 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/5d36d473-c89a-496a-ab27-d65535edb0ec-config\") pod \"prometheus-metric-storage-0\" (UID: \"5d36d473-c89a-496a-ab27-d65535edb0ec\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.760100 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/5d36d473-c89a-496a-ab27-d65535edb0ec-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"5d36d473-c89a-496a-ab27-d65535edb0ec\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.763419 4792 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.763476 4792 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-f0561b14-c8c2-41be-a503-99f9bcbd40cd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f0561b14-c8c2-41be-a503-99f9bcbd40cd\") pod \"prometheus-metric-storage-0\" (UID: \"5d36d473-c89a-496a-ab27-d65535edb0ec\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/14a22ce14f64bd088e22b4e9468d294093eded8b3f54a4c5617279316681f14a/globalmount\"" pod="openstack/prometheus-metric-storage-0"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.777105 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/5d36d473-c89a-496a-ab27-d65535edb0ec-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"5d36d473-c89a-496a-ab27-d65535edb0ec\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.777334 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/5d36d473-c89a-496a-ab27-d65535edb0ec-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"5d36d473-c89a-496a-ab27-d65535edb0ec\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.777097 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/5d36d473-c89a-496a-ab27-d65535edb0ec-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"5d36d473-c89a-496a-ab27-d65535edb0ec\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.786232 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-btk4l\" (UniqueName: \"kubernetes.io/projected/5d36d473-c89a-496a-ab27-d65535edb0ec-kube-api-access-btk4l\") pod \"prometheus-metric-storage-0\" (UID: \"5d36d473-c89a-496a-ab27-d65535edb0ec\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.821817 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-f0561b14-c8c2-41be-a503-99f9bcbd40cd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f0561b14-c8c2-41be-a503-99f9bcbd40cd\") pod \"prometheus-metric-storage-0\" (UID: \"5d36d473-c89a-496a-ab27-d65535edb0ec\") " pod="openstack/prometheus-metric-storage-0"
Dec 02 18:57:26 crc kubenswrapper[4792]: I1202 18:57:26.839557 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Dec 02 18:57:27 crc kubenswrapper[4792]: I1202 18:57:27.552916 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="357feab9-6738-4c52-8478-0763a304671f" path="/var/lib/kubelet/pods/357feab9-6738-4c52-8478-0763a304671f/volumes"
Dec 02 18:57:27 crc kubenswrapper[4792]: I1202 18:57:27.565857 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/252fab2e-fcb7-43e8-940a-48adc8f4ebd5-etc-swift\") pod \"swift-storage-0\" (UID: \"252fab2e-fcb7-43e8-940a-48adc8f4ebd5\") " pod="openstack/swift-storage-0"
Dec 02 18:57:27 crc kubenswrapper[4792]: I1202 18:57:27.575483 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/252fab2e-fcb7-43e8-940a-48adc8f4ebd5-etc-swift\") pod \"swift-storage-0\" (UID: \"252fab2e-fcb7-43e8-940a-48adc8f4ebd5\") " pod="openstack/swift-storage-0"
Dec 02 18:57:27 crc kubenswrapper[4792]: I1202 18:57:27.711596 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0"
Dec 02 18:57:28 crc kubenswrapper[4792]: I1202 18:57:28.942282 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/prometheus-metric-storage-0" podUID="357feab9-6738-4c52-8478-0763a304671f" containerName="prometheus" probeResult="failure" output="Get \"http://10.217.0.114:9090/-/ready\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 02 18:57:29 crc kubenswrapper[4792]: I1202 18:57:29.322828 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0"
Dec 02 18:57:30 crc kubenswrapper[4792]: E1202 18:57:30.160005 4792 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6f2d96d0_f671_46cd_8e95_162a0773470d.slice\": RecentStats: unable to find data in memory cache]"
Dec 02 18:57:33 crc kubenswrapper[4792]: I1202 18:57:33.631420 4792 scope.go:117] "RemoveContainer" containerID="468bb6ff5c8c9b2fc5ac5d0d48cc56d770bd6b3c4aeb66e472caf0ca853b2128"
Dec 02 18:57:33 crc kubenswrapper[4792]: I1202 18:57:33.849066 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-s44lp-config-j6nlx"
Dec 02 18:57:33 crc kubenswrapper[4792]: I1202 18:57:33.863007 4792 scope.go:117] "RemoveContainer" containerID="3f54a3b0040179d09f172163420f3935de7f9f98ccbfb59dedbb235ee6139d42"
Dec 02 18:57:33 crc kubenswrapper[4792]: I1202 18:57:33.900268 4792 scope.go:117] "RemoveContainer" containerID="e5a2809d92c07f18aae52eb3690be10358336df409d78b614cc40481bfa486bd"
Dec 02 18:57:33 crc kubenswrapper[4792]: I1202 18:57:33.998643 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/807aae1e-88ef-4966-8717-aae7b6f0f2ee-var-run\") pod \"807aae1e-88ef-4966-8717-aae7b6f0f2ee\" (UID: \"807aae1e-88ef-4966-8717-aae7b6f0f2ee\") "
Dec 02 18:57:33 crc kubenswrapper[4792]: I1202 18:57:33.998789 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/807aae1e-88ef-4966-8717-aae7b6f0f2ee-var-log-ovn\") pod \"807aae1e-88ef-4966-8717-aae7b6f0f2ee\" (UID: \"807aae1e-88ef-4966-8717-aae7b6f0f2ee\") "
Dec 02 18:57:33 crc kubenswrapper[4792]: I1202 18:57:33.998870 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/807aae1e-88ef-4966-8717-aae7b6f0f2ee-var-run-ovn\") pod \"807aae1e-88ef-4966-8717-aae7b6f0f2ee\" (UID: \"807aae1e-88ef-4966-8717-aae7b6f0f2ee\") "
Dec 02 18:57:33 crc kubenswrapper[4792]: I1202 18:57:33.998943 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/807aae1e-88ef-4966-8717-aae7b6f0f2ee-var-run" (OuterVolumeSpecName: "var-run") pod "807aae1e-88ef-4966-8717-aae7b6f0f2ee" (UID: "807aae1e-88ef-4966-8717-aae7b6f0f2ee"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 02 18:57:33 crc kubenswrapper[4792]: I1202 18:57:33.999011 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/807aae1e-88ef-4966-8717-aae7b6f0f2ee-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "807aae1e-88ef-4966-8717-aae7b6f0f2ee" (UID: "807aae1e-88ef-4966-8717-aae7b6f0f2ee"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 02 18:57:33 crc kubenswrapper[4792]: I1202 18:57:33.999060 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/807aae1e-88ef-4966-8717-aae7b6f0f2ee-additional-scripts\") pod \"807aae1e-88ef-4966-8717-aae7b6f0f2ee\" (UID: \"807aae1e-88ef-4966-8717-aae7b6f0f2ee\") "
Dec 02 18:57:33 crc kubenswrapper[4792]: I1202 18:57:33.999104 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/807aae1e-88ef-4966-8717-aae7b6f0f2ee-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "807aae1e-88ef-4966-8717-aae7b6f0f2ee" (UID: "807aae1e-88ef-4966-8717-aae7b6f0f2ee"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 02 18:57:33 crc kubenswrapper[4792]: I1202 18:57:33.999113 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/807aae1e-88ef-4966-8717-aae7b6f0f2ee-scripts\") pod \"807aae1e-88ef-4966-8717-aae7b6f0f2ee\" (UID: \"807aae1e-88ef-4966-8717-aae7b6f0f2ee\") "
Dec 02 18:57:33 crc kubenswrapper[4792]: I1202 18:57:33.999221 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-68phr\" (UniqueName: \"kubernetes.io/projected/807aae1e-88ef-4966-8717-aae7b6f0f2ee-kube-api-access-68phr\") pod \"807aae1e-88ef-4966-8717-aae7b6f0f2ee\" (UID: \"807aae1e-88ef-4966-8717-aae7b6f0f2ee\") "
Dec 02 18:57:33 crc kubenswrapper[4792]: I1202 18:57:33.999893 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/807aae1e-88ef-4966-8717-aae7b6f0f2ee-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "807aae1e-88ef-4966-8717-aae7b6f0f2ee" (UID: "807aae1e-88ef-4966-8717-aae7b6f0f2ee"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 18:57:33 crc kubenswrapper[4792]: I1202 18:57:33.999951 4792 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/807aae1e-88ef-4966-8717-aae7b6f0f2ee-var-run-ovn\") on node \"crc\" DevicePath \"\""
Dec 02 18:57:34 crc kubenswrapper[4792]: I1202 18:57:33.999991 4792 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/807aae1e-88ef-4966-8717-aae7b6f0f2ee-var-run\") on node \"crc\" DevicePath \"\""
Dec 02 18:57:34 crc kubenswrapper[4792]: I1202 18:57:34.000004 4792 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/807aae1e-88ef-4966-8717-aae7b6f0f2ee-var-log-ovn\") on node \"crc\" DevicePath \"\""
Dec 02 18:57:34 crc kubenswrapper[4792]: I1202 18:57:34.000655 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/807aae1e-88ef-4966-8717-aae7b6f0f2ee-scripts" (OuterVolumeSpecName: "scripts") pod "807aae1e-88ef-4966-8717-aae7b6f0f2ee" (UID: "807aae1e-88ef-4966-8717-aae7b6f0f2ee"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 18:57:34 crc kubenswrapper[4792]: I1202 18:57:34.004245 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/807aae1e-88ef-4966-8717-aae7b6f0f2ee-kube-api-access-68phr" (OuterVolumeSpecName: "kube-api-access-68phr") pod "807aae1e-88ef-4966-8717-aae7b6f0f2ee" (UID: "807aae1e-88ef-4966-8717-aae7b6f0f2ee"). InnerVolumeSpecName "kube-api-access-68phr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 18:57:34 crc kubenswrapper[4792]: I1202 18:57:34.101738 4792 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/807aae1e-88ef-4966-8717-aae7b6f0f2ee-additional-scripts\") on node \"crc\" DevicePath \"\""
Dec 02 18:57:34 crc kubenswrapper[4792]: I1202 18:57:34.102074 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/807aae1e-88ef-4966-8717-aae7b6f0f2ee-scripts\") on node \"crc\" DevicePath \"\""
Dec 02 18:57:34 crc kubenswrapper[4792]: I1202 18:57:34.102086 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-68phr\" (UniqueName: \"kubernetes.io/projected/807aae1e-88ef-4966-8717-aae7b6f0f2ee-kube-api-access-68phr\") on node \"crc\" DevicePath \"\""
Dec 02 18:57:34 crc kubenswrapper[4792]: I1202 18:57:34.240065 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 02 18:57:34 crc kubenswrapper[4792]: W1202 18:57:34.252844 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5d36d473_c89a_496a_ab27_d65535edb0ec.slice/crio-f78df245d8ca9691198b78fe3cb979e9853e3bdbe0731fa6192a5825c5af9385 WatchSource:0}: Error finding container f78df245d8ca9691198b78fe3cb979e9853e3bdbe0731fa6192a5825c5af9385: Status 404 returned error can't find the container with id f78df245d8ca9691198b78fe3cb979e9853e3bdbe0731fa6192a5825c5af9385
Dec 02 18:57:34 crc kubenswrapper[4792]: I1202 18:57:34.253389 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-s44lp-config-j6nlx"
Dec 02 18:57:34 crc kubenswrapper[4792]: I1202 18:57:34.253971 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-s44lp-config-j6nlx" event={"ID":"807aae1e-88ef-4966-8717-aae7b6f0f2ee","Type":"ContainerDied","Data":"161d0362f4d8a25d7b722635d8f06217798d3015002037a553d506a5ae6443b0"}
Dec 02 18:57:34 crc kubenswrapper[4792]: I1202 18:57:34.254012 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="161d0362f4d8a25d7b722635d8f06217798d3015002037a553d506a5ae6443b0"
Dec 02 18:57:34 crc kubenswrapper[4792]: I1202 18:57:34.317195 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"]
Dec 02 18:57:34 crc kubenswrapper[4792]: W1202 18:57:34.322705 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod252fab2e_fcb7_43e8_940a_48adc8f4ebd5.slice/crio-c97a32c6ca56d5451407979175c8e51b703b8b7ec393a02065e036fa693ec2d6 WatchSource:0}: Error finding container c97a32c6ca56d5451407979175c8e51b703b8b7ec393a02065e036fa693ec2d6: Status 404 returned error can't find the container with id c97a32c6ca56d5451407979175c8e51b703b8b7ec393a02065e036fa693ec2d6
Dec 02 18:57:34 crc kubenswrapper[4792]: I1202 18:57:34.906367 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-ingester-0"
Dec 02 18:57:34 crc kubenswrapper[4792]: I1202 18:57:34.975858 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-s44lp-config-j6nlx"]
Dec 02 18:57:34 crc kubenswrapper[4792]: I1202 18:57:34.998538 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-s44lp-config-j6nlx"]
Dec 02 18:57:35 crc kubenswrapper[4792]: I1202 18:57:35.266262 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-wh2n2" event={"ID":"98cbdc65-cc24-4a81-899b-66de1d1a6ca3","Type":"ContainerStarted","Data":"adc67c19fa767df57299a569e428f3fbccf0407769242da70dc7bcb7f38acb78"}
Dec 02 18:57:35 crc kubenswrapper[4792]: I1202 18:57:35.269003 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"252fab2e-fcb7-43e8-940a-48adc8f4ebd5","Type":"ContainerStarted","Data":"c97a32c6ca56d5451407979175c8e51b703b8b7ec393a02065e036fa693ec2d6"}
Dec 02 18:57:35 crc kubenswrapper[4792]: I1202 18:57:35.270107 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"5d36d473-c89a-496a-ab27-d65535edb0ec","Type":"ContainerStarted","Data":"f78df245d8ca9691198b78fe3cb979e9853e3bdbe0731fa6192a5825c5af9385"}
Dec 02 18:57:35 crc kubenswrapper[4792]: I1202 18:57:35.296021 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-wh2n2" podStartSLOduration=2.847438275 podStartE2EDuration="17.295999754s" podCreationTimestamp="2025-12-02 18:57:18 +0000 UTC" firstStartedPulling="2025-12-02 18:57:19.300908202 +0000 UTC m=+1270.073800520" lastFinishedPulling="2025-12-02 18:57:33.749469661 +0000 UTC m=+1284.522361999" observedRunningTime="2025-12-02 18:57:35.282602117 +0000 UTC m=+1286.055494465" watchObservedRunningTime="2025-12-02 18:57:35.295999754 +0000 UTC m=+1286.068892112"
Dec 02 18:57:35 crc kubenswrapper[4792]: I1202 18:57:35.559827 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="807aae1e-88ef-4966-8717-aae7b6f0f2ee" path="/var/lib/kubelet/pods/807aae1e-88ef-4966-8717-aae7b6f0f2ee/volumes"
Dec 02 18:57:36 crc kubenswrapper[4792]: I1202 18:57:36.282545 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"252fab2e-fcb7-43e8-940a-48adc8f4ebd5","Type":"ContainerStarted","Data":"96584e2b3aeeb159b97df66863b8bdd4d1cc5717fe6baa6de3665548b24f8632"}
Dec 02 18:57:36 crc kubenswrapper[4792]: I1202 18:57:36.282838 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"252fab2e-fcb7-43e8-940a-48adc8f4ebd5","Type":"ContainerStarted","Data":"5b0f097031f757cbada9237b22fa69cc184b8327f05413a1cc3b92bed9aefcd9"}
Dec 02 18:57:37 crc kubenswrapper[4792]: I1202 18:57:37.304765 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"252fab2e-fcb7-43e8-940a-48adc8f4ebd5","Type":"ContainerStarted","Data":"183bc7b212ef61d31ecd5b9a2025e1285f8acc845197f8d4b9b9c881d8f6cd16"}
Dec 02 18:57:37 crc kubenswrapper[4792]: I1202 18:57:37.305179 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"252fab2e-fcb7-43e8-940a-48adc8f4ebd5","Type":"ContainerStarted","Data":"1eb796e1e157d397730d21ae81b82edee5a51e4d11fe01bed69cc52cdc514472"}
Dec 02 18:57:38 crc kubenswrapper[4792]: I1202 18:57:38.321223 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"252fab2e-fcb7-43e8-940a-48adc8f4ebd5","Type":"ContainerStarted","Data":"5929b3a5ab44adbb1bc4b73e578eb121258696acf7f123b7b75fdb2c1d132026"}
Dec 02 18:57:38 crc kubenswrapper[4792]: I1202 18:57:38.322910 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"5d36d473-c89a-496a-ab27-d65535edb0ec","Type":"ContainerStarted","Data":"9c494caf08d971d6546a689f20c025202fd3dbd21e70a06a9109a25bef1e68a2"}
Dec 02 18:57:39 crc kubenswrapper[4792]: I1202 18:57:39.348060 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"252fab2e-fcb7-43e8-940a-48adc8f4ebd5","Type":"ContainerStarted","Data":"88008edfad73a60f3814ba056c918aa8270f7f8e642b3f796d05b6361e11b5ab"}
Dec 02 18:57:39 crc kubenswrapper[4792]: I1202 18:57:39.348494 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"252fab2e-fcb7-43e8-940a-48adc8f4ebd5","Type":"ContainerStarted","Data":"b1667ae7b38c4a8fe86c47dcf99a89691513d953d7f4d5221ef5cedeb725bc05"}
Dec 02 18:57:39 crc kubenswrapper[4792]: I1202 18:57:39.348514 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"252fab2e-fcb7-43e8-940a-48adc8f4ebd5","Type":"ContainerStarted","Data":"4fb70099723584eedf025a84c301f9811c8b65580a66e2f3ea12b638cfb0e189"}
Dec 02 18:57:39 crc kubenswrapper[4792]: I1202 18:57:39.358863 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0"
Dec 02 18:57:39 crc kubenswrapper[4792]: I1202 18:57:39.745245 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-db-create-h2qp6"]
Dec 02 18:57:39 crc kubenswrapper[4792]: E1202 18:57:39.745956 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="807aae1e-88ef-4966-8717-aae7b6f0f2ee" containerName="ovn-config"
Dec 02 18:57:39 crc kubenswrapper[4792]: I1202 18:57:39.745969 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="807aae1e-88ef-4966-8717-aae7b6f0f2ee" containerName="ovn-config"
Dec 02 18:57:39 crc kubenswrapper[4792]: I1202 18:57:39.746137 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="807aae1e-88ef-4966-8717-aae7b6f0f2ee" containerName="ovn-config"
Dec 02 18:57:39 crc kubenswrapper[4792]: I1202 18:57:39.746745 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-create-h2qp6"
Dec 02 18:57:39 crc kubenswrapper[4792]: I1202 18:57:39.795633 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-create-h2qp6"]
Dec 02 18:57:39 crc kubenswrapper[4792]: I1202 18:57:39.823635 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-244xq\" (UniqueName: \"kubernetes.io/projected/5ac4b779-c2dd-4da8-a15d-e3d8ad165510-kube-api-access-244xq\") pod \"cloudkitty-db-create-h2qp6\" (UID: \"5ac4b779-c2dd-4da8-a15d-e3d8ad165510\") " pod="openstack/cloudkitty-db-create-h2qp6"
Dec 02 18:57:39 crc kubenswrapper[4792]: I1202 18:57:39.823756 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5ac4b779-c2dd-4da8-a15d-e3d8ad165510-operator-scripts\") pod \"cloudkitty-db-create-h2qp6\" (UID: \"5ac4b779-c2dd-4da8-a15d-e3d8ad165510\") " pod="openstack/cloudkitty-db-create-h2qp6"
Dec 02 18:57:39 crc kubenswrapper[4792]: I1202 18:57:39.842326 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-dsqd2"]
Dec 02 18:57:39 crc kubenswrapper[4792]: I1202 18:57:39.844113 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-dsqd2"
Dec 02 18:57:39 crc kubenswrapper[4792]: I1202 18:57:39.865184 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-dsqd2"]
Dec 02 18:57:39 crc kubenswrapper[4792]: I1202 18:57:39.925958 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/17f2eea9-c362-4195-a8fc-8d392d045f4f-operator-scripts\") pod \"cinder-db-create-dsqd2\" (UID: \"17f2eea9-c362-4195-a8fc-8d392d045f4f\") " pod="openstack/cinder-db-create-dsqd2"
Dec 02 18:57:39 crc kubenswrapper[4792]: I1202 18:57:39.926469 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bmdcb\" (UniqueName: \"kubernetes.io/projected/17f2eea9-c362-4195-a8fc-8d392d045f4f-kube-api-access-bmdcb\") pod \"cinder-db-create-dsqd2\" (UID: \"17f2eea9-c362-4195-a8fc-8d392d045f4f\") " pod="openstack/cinder-db-create-dsqd2"
Dec 02 18:57:39 crc kubenswrapper[4792]: I1202 18:57:39.926635 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-244xq\" (UniqueName: \"kubernetes.io/projected/5ac4b779-c2dd-4da8-a15d-e3d8ad165510-kube-api-access-244xq\") pod \"cloudkitty-db-create-h2qp6\" (UID: \"5ac4b779-c2dd-4da8-a15d-e3d8ad165510\") " pod="openstack/cloudkitty-db-create-h2qp6"
Dec 02 18:57:39 crc kubenswrapper[4792]: I1202 18:57:39.926799 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5ac4b779-c2dd-4da8-a15d-e3d8ad165510-operator-scripts\") pod \"cloudkitty-db-create-h2qp6\" (UID: \"5ac4b779-c2dd-4da8-a15d-e3d8ad165510\") " pod="openstack/cloudkitty-db-create-h2qp6"
Dec 02 18:57:39 crc kubenswrapper[4792]: I1202 18:57:39.927564 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5ac4b779-c2dd-4da8-a15d-e3d8ad165510-operator-scripts\") pod \"cloudkitty-db-create-h2qp6\" (UID: \"5ac4b779-c2dd-4da8-a15d-e3d8ad165510\") " pod="openstack/cloudkitty-db-create-h2qp6"
Dec 02 18:57:39 crc kubenswrapper[4792]: I1202 18:57:39.946906 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-87a8-account-create-update-fbvt2"]
Dec 02 18:57:39 crc kubenswrapper[4792]: I1202 18:57:39.948062 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-87a8-account-create-update-fbvt2"
Dec 02 18:57:39 crc kubenswrapper[4792]: I1202 18:57:39.952749 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret"
Dec 02 18:57:39 crc kubenswrapper[4792]: I1202 18:57:39.953450 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-244xq\" (UniqueName: \"kubernetes.io/projected/5ac4b779-c2dd-4da8-a15d-e3d8ad165510-kube-api-access-244xq\") pod \"cloudkitty-db-create-h2qp6\" (UID: \"5ac4b779-c2dd-4da8-a15d-e3d8ad165510\") " pod="openstack/cloudkitty-db-create-h2qp6"
Dec 02 18:57:39 crc kubenswrapper[4792]: I1202 18:57:39.959079 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-87a8-account-create-update-fbvt2"]
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.006309 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-jj9jh"]
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.007515 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-jj9jh"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.009737 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.010441 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.010843 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.013091 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-phdxk"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.021578 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-jj9jh"]
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.028393 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmdcb\" (UniqueName: \"kubernetes.io/projected/17f2eea9-c362-4195-a8fc-8d392d045f4f-kube-api-access-bmdcb\") pod \"cinder-db-create-dsqd2\" (UID: \"17f2eea9-c362-4195-a8fc-8d392d045f4f\") " pod="openstack/cinder-db-create-dsqd2"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.028460 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jgcls\" (UniqueName: \"kubernetes.io/projected/366ad881-59b1-434f-b500-3cb185421ebe-kube-api-access-jgcls\") pod \"cinder-87a8-account-create-update-fbvt2\" (UID: \"366ad881-59b1-434f-b500-3cb185421ebe\") " pod="openstack/cinder-87a8-account-create-update-fbvt2"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.028512 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/17f2eea9-c362-4195-a8fc-8d392d045f4f-operator-scripts\") pod \"cinder-db-create-dsqd2\" (UID: \"17f2eea9-c362-4195-a8fc-8d392d045f4f\") " pod="openstack/cinder-db-create-dsqd2"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.028587 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/366ad881-59b1-434f-b500-3cb185421ebe-operator-scripts\") pod \"cinder-87a8-account-create-update-fbvt2\" (UID: \"366ad881-59b1-434f-b500-3cb185421ebe\") " pod="openstack/cinder-87a8-account-create-update-fbvt2"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.029294 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/17f2eea9-c362-4195-a8fc-8d392d045f4f-operator-scripts\") pod \"cinder-db-create-dsqd2\" (UID: \"17f2eea9-c362-4195-a8fc-8d392d045f4f\") " pod="openstack/cinder-db-create-dsqd2"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.068896 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-create-h2qp6"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.116120 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bmdcb\" (UniqueName: \"kubernetes.io/projected/17f2eea9-c362-4195-a8fc-8d392d045f4f-kube-api-access-bmdcb\") pod \"cinder-db-create-dsqd2\" (UID: \"17f2eea9-c362-4195-a8fc-8d392d045f4f\") " pod="openstack/cinder-db-create-dsqd2"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.129683 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/366ad881-59b1-434f-b500-3cb185421ebe-operator-scripts\") pod \"cinder-87a8-account-create-update-fbvt2\" (UID: \"366ad881-59b1-434f-b500-3cb185421ebe\") " pod="openstack/cinder-87a8-account-create-update-fbvt2"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.129736 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a662d52b-8774-40a9-b965-ca41d5f1c6c4-config-data\") pod \"keystone-db-sync-jj9jh\" (UID: \"a662d52b-8774-40a9-b965-ca41d5f1c6c4\") " pod="openstack/keystone-db-sync-jj9jh"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.129784 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a662d52b-8774-40a9-b965-ca41d5f1c6c4-combined-ca-bundle\") pod \"keystone-db-sync-jj9jh\" (UID: \"a662d52b-8774-40a9-b965-ca41d5f1c6c4\") " pod="openstack/keystone-db-sync-jj9jh"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.129826 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jgcls\" (UniqueName: \"kubernetes.io/projected/366ad881-59b1-434f-b500-3cb185421ebe-kube-api-access-jgcls\") pod \"cinder-87a8-account-create-update-fbvt2\" (UID: \"366ad881-59b1-434f-b500-3cb185421ebe\") " pod="openstack/cinder-87a8-account-create-update-fbvt2"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.129846 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r9h8q\" (UniqueName: \"kubernetes.io/projected/a662d52b-8774-40a9-b965-ca41d5f1c6c4-kube-api-access-r9h8q\") pod \"keystone-db-sync-jj9jh\" (UID: \"a662d52b-8774-40a9-b965-ca41d5f1c6c4\") " pod="openstack/keystone-db-sync-jj9jh"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.130599 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/366ad881-59b1-434f-b500-3cb185421ebe-operator-scripts\") pod \"cinder-87a8-account-create-update-fbvt2\" (UID: \"366ad881-59b1-434f-b500-3cb185421ebe\") " pod="openstack/cinder-87a8-account-create-update-fbvt2"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.144905 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-4svmb"]
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.146027 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-4svmb"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.157782 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-ebea-account-create-update-fq5wk"]
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.158969 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-ebea-account-create-update-fq5wk"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.160763 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-db-secret"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.160906 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-dsqd2"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.167943 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jgcls\" (UniqueName: \"kubernetes.io/projected/366ad881-59b1-434f-b500-3cb185421ebe-kube-api-access-jgcls\") pod \"cinder-87a8-account-create-update-fbvt2\" (UID: \"366ad881-59b1-434f-b500-3cb185421ebe\") " pod="openstack/cinder-87a8-account-create-update-fbvt2"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.177964 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-4svmb"]
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.209589 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-ebea-account-create-update-fq5wk"]
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.232645 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/39d51baf-c92a-413f-9257-facc87ce7084-operator-scripts\") pod \"barbican-db-create-4svmb\" (UID: \"39d51baf-c92a-413f-9257-facc87ce7084\") " pod="openstack/barbican-db-create-4svmb"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.232981 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a662d52b-8774-40a9-b965-ca41d5f1c6c4-config-data\") pod \"keystone-db-sync-jj9jh\" (UID: \"a662d52b-8774-40a9-b965-ca41d5f1c6c4\") " pod="openstack/keystone-db-sync-jj9jh"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.233121 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v79l8\" (UniqueName: \"kubernetes.io/projected/0c65caa0-11c0-4a4d-b58d-cb17efd01928-kube-api-access-v79l8\") pod \"cloudkitty-ebea-account-create-update-fq5wk\" (UID: \"0c65caa0-11c0-4a4d-b58d-cb17efd01928\") " pod="openstack/cloudkitty-ebea-account-create-update-fq5wk"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.233237 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a662d52b-8774-40a9-b965-ca41d5f1c6c4-combined-ca-bundle\") pod \"keystone-db-sync-jj9jh\" (UID: \"a662d52b-8774-40a9-b965-ca41d5f1c6c4\") " pod="openstack/keystone-db-sync-jj9jh"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.233383 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r9h8q\" (UniqueName: \"kubernetes.io/projected/a662d52b-8774-40a9-b965-ca41d5f1c6c4-kube-api-access-r9h8q\") pod \"keystone-db-sync-jj9jh\" (UID: \"a662d52b-8774-40a9-b965-ca41d5f1c6c4\") " pod="openstack/keystone-db-sync-jj9jh"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.234273 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zqddp\" (UniqueName: \"kubernetes.io/projected/39d51baf-c92a-413f-9257-facc87ce7084-kube-api-access-zqddp\") pod \"barbican-db-create-4svmb\" (UID: \"39d51baf-c92a-413f-9257-facc87ce7084\") " pod="openstack/barbican-db-create-4svmb"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.234431 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0c65caa0-11c0-4a4d-b58d-cb17efd01928-operator-scripts\") pod \"cloudkitty-ebea-account-create-update-fq5wk\" (UID: \"0c65caa0-11c0-4a4d-b58d-cb17efd01928\") " pod="openstack/cloudkitty-ebea-account-create-update-fq5wk"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.249408 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a662d52b-8774-40a9-b965-ca41d5f1c6c4-config-data\") pod \"keystone-db-sync-jj9jh\" (UID: \"a662d52b-8774-40a9-b965-ca41d5f1c6c4\") " pod="openstack/keystone-db-sync-jj9jh"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.257183 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a662d52b-8774-40a9-b965-ca41d5f1c6c4-combined-ca-bundle\") pod \"keystone-db-sync-jj9jh\" (UID: \"a662d52b-8774-40a9-b965-ca41d5f1c6c4\") " pod="openstack/keystone-db-sync-jj9jh"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.298786 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r9h8q\" (UniqueName: \"kubernetes.io/projected/a662d52b-8774-40a9-b965-ca41d5f1c6c4-kube-api-access-r9h8q\") pod \"keystone-db-sync-jj9jh\" (UID: \"a662d52b-8774-40a9-b965-ca41d5f1c6c4\") " pod="openstack/keystone-db-sync-jj9jh"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.304232 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-87a8-account-create-update-fbvt2"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.308220 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-1802-account-create-update-tfzgc"]
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.309482 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-1802-account-create-update-tfzgc"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.311373 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.330804 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-jj9jh"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.349268 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v79l8\" (UniqueName: \"kubernetes.io/projected/0c65caa0-11c0-4a4d-b58d-cb17efd01928-kube-api-access-v79l8\") pod \"cloudkitty-ebea-account-create-update-fq5wk\" (UID: \"0c65caa0-11c0-4a4d-b58d-cb17efd01928\") " pod="openstack/cloudkitty-ebea-account-create-update-fq5wk"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.349610 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zqddp\" (UniqueName: \"kubernetes.io/projected/39d51baf-c92a-413f-9257-facc87ce7084-kube-api-access-zqddp\") pod \"barbican-db-create-4svmb\" (UID: \"39d51baf-c92a-413f-9257-facc87ce7084\") " pod="openstack/barbican-db-create-4svmb"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.349634 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0c65caa0-11c0-4a4d-b58d-cb17efd01928-operator-scripts\") pod \"cloudkitty-ebea-account-create-update-fq5wk\" (UID: \"0c65caa0-11c0-4a4d-b58d-cb17efd01928\") " pod="openstack/cloudkitty-ebea-account-create-update-fq5wk"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.349699 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/39d51baf-c92a-413f-9257-facc87ce7084-operator-scripts\") pod \"barbican-db-create-4svmb\" (UID: \"39d51baf-c92a-413f-9257-facc87ce7084\") " pod="openstack/barbican-db-create-4svmb"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.350294 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/39d51baf-c92a-413f-9257-facc87ce7084-operator-scripts\") pod \"barbican-db-create-4svmb\" (UID: \"39d51baf-c92a-413f-9257-facc87ce7084\") " pod="openstack/barbican-db-create-4svmb"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.354287 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0c65caa0-11c0-4a4d-b58d-cb17efd01928-operator-scripts\") pod \"cloudkitty-ebea-account-create-update-fq5wk\" (UID: \"0c65caa0-11c0-4a4d-b58d-cb17efd01928\") " pod="openstack/cloudkitty-ebea-account-create-update-fq5wk"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.356806 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-1802-account-create-update-tfzgc"]
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.396763 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zqddp\" (UniqueName: \"kubernetes.io/projected/39d51baf-c92a-413f-9257-facc87ce7084-kube-api-access-zqddp\") pod \"barbican-db-create-4svmb\" (UID: \"39d51baf-c92a-413f-9257-facc87ce7084\") " pod="openstack/barbican-db-create-4svmb"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.402237 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v79l8\" (UniqueName: \"kubernetes.io/projected/0c65caa0-11c0-4a4d-b58d-cb17efd01928-kube-api-access-v79l8\") pod \"cloudkitty-ebea-account-create-update-fq5wk\" (UID: \"0c65caa0-11c0-4a4d-b58d-cb17efd01928\") " pod="openstack/cloudkitty-ebea-account-create-update-fq5wk"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.434062 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-ebea-account-create-update-fq5wk"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.451601 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f80d3801-ef59-4680-9736-dc6d78d1b7d8-operator-scripts\") pod \"barbican-1802-account-create-update-tfzgc\" (UID: \"f80d3801-ef59-4680-9736-dc6d78d1b7d8\") " pod="openstack/barbican-1802-account-create-update-tfzgc"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.451649 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fwntc\" (UniqueName: \"kubernetes.io/projected/f80d3801-ef59-4680-9736-dc6d78d1b7d8-kube-api-access-fwntc\") pod \"barbican-1802-account-create-update-tfzgc\" (UID: \"f80d3801-ef59-4680-9736-dc6d78d1b7d8\") " pod="openstack/barbican-1802-account-create-update-tfzgc"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.557598 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f80d3801-ef59-4680-9736-dc6d78d1b7d8-operator-scripts\") pod \"barbican-1802-account-create-update-tfzgc\" (UID: \"f80d3801-ef59-4680-9736-dc6d78d1b7d8\") " pod="openstack/barbican-1802-account-create-update-tfzgc"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.557685 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fwntc\" (UniqueName: \"kubernetes.io/projected/f80d3801-ef59-4680-9736-dc6d78d1b7d8-kube-api-access-fwntc\") pod \"barbican-1802-account-create-update-tfzgc\" (UID: \"f80d3801-ef59-4680-9736-dc6d78d1b7d8\") " pod="openstack/barbican-1802-account-create-update-tfzgc"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.559191 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f80d3801-ef59-4680-9736-dc6d78d1b7d8-operator-scripts\") pod \"barbican-1802-account-create-update-tfzgc\" (UID: \"f80d3801-ef59-4680-9736-dc6d78d1b7d8\") " pod="openstack/barbican-1802-account-create-update-tfzgc"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.596645 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-26zbd"]
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.598140 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-26zbd"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.613994 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fwntc\" (UniqueName: \"kubernetes.io/projected/f80d3801-ef59-4680-9736-dc6d78d1b7d8-kube-api-access-fwntc\") pod \"barbican-1802-account-create-update-tfzgc\" (UID: \"f80d3801-ef59-4680-9736-dc6d78d1b7d8\") " pod="openstack/barbican-1802-account-create-update-tfzgc"
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.635975 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-26zbd"]
Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.650035 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-4svmb" Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.663634 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-8a45-account-create-update-rvtjj"] Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.665042 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-8a45-account-create-update-rvtjj" Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.668342 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.683658 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-8a45-account-create-update-rvtjj"] Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.746116 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-1802-account-create-update-tfzgc" Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.761190 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bcq4d\" (UniqueName: \"kubernetes.io/projected/6acb750d-2221-421a-af1c-dfe569427350-kube-api-access-bcq4d\") pod \"neutron-db-create-26zbd\" (UID: \"6acb750d-2221-421a-af1c-dfe569427350\") " pod="openstack/neutron-db-create-26zbd" Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.761319 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jkh4l\" (UniqueName: \"kubernetes.io/projected/96ac9638-e157-43ba-b12c-96b502226293-kube-api-access-jkh4l\") pod \"neutron-8a45-account-create-update-rvtjj\" (UID: \"96ac9638-e157-43ba-b12c-96b502226293\") " pod="openstack/neutron-8a45-account-create-update-rvtjj" Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.761375 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96ac9638-e157-43ba-b12c-96b502226293-operator-scripts\") pod \"neutron-8a45-account-create-update-rvtjj\" (UID: \"96ac9638-e157-43ba-b12c-96b502226293\") " pod="openstack/neutron-8a45-account-create-update-rvtjj" Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.761449 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6acb750d-2221-421a-af1c-dfe569427350-operator-scripts\") pod \"neutron-db-create-26zbd\" (UID: \"6acb750d-2221-421a-af1c-dfe569427350\") " pod="openstack/neutron-db-create-26zbd" Dec 02 18:57:40 crc kubenswrapper[4792]: E1202 18:57:40.834294 4792 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6f2d96d0_f671_46cd_8e95_162a0773470d.slice\": RecentStats: unable to find data in memory cache]" Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.862935 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jkh4l\" (UniqueName: \"kubernetes.io/projected/96ac9638-e157-43ba-b12c-96b502226293-kube-api-access-jkh4l\") pod \"neutron-8a45-account-create-update-rvtjj\" (UID: \"96ac9638-e157-43ba-b12c-96b502226293\") " pod="openstack/neutron-8a45-account-create-update-rvtjj" Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.863006 4792 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96ac9638-e157-43ba-b12c-96b502226293-operator-scripts\") pod \"neutron-8a45-account-create-update-rvtjj\" (UID: \"96ac9638-e157-43ba-b12c-96b502226293\") " pod="openstack/neutron-8a45-account-create-update-rvtjj" Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.863066 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6acb750d-2221-421a-af1c-dfe569427350-operator-scripts\") pod \"neutron-db-create-26zbd\" (UID: \"6acb750d-2221-421a-af1c-dfe569427350\") " pod="openstack/neutron-db-create-26zbd" Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.863154 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bcq4d\" (UniqueName: \"kubernetes.io/projected/6acb750d-2221-421a-af1c-dfe569427350-kube-api-access-bcq4d\") pod \"neutron-db-create-26zbd\" (UID: \"6acb750d-2221-421a-af1c-dfe569427350\") " pod="openstack/neutron-db-create-26zbd" Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.864915 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96ac9638-e157-43ba-b12c-96b502226293-operator-scripts\") pod \"neutron-8a45-account-create-update-rvtjj\" (UID: \"96ac9638-e157-43ba-b12c-96b502226293\") " pod="openstack/neutron-8a45-account-create-update-rvtjj" Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.874794 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6acb750d-2221-421a-af1c-dfe569427350-operator-scripts\") pod \"neutron-db-create-26zbd\" (UID: \"6acb750d-2221-421a-af1c-dfe569427350\") " pod="openstack/neutron-db-create-26zbd" Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.881722 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bcq4d\" (UniqueName: \"kubernetes.io/projected/6acb750d-2221-421a-af1c-dfe569427350-kube-api-access-bcq4d\") pod \"neutron-db-create-26zbd\" (UID: \"6acb750d-2221-421a-af1c-dfe569427350\") " pod="openstack/neutron-db-create-26zbd" Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.887130 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jkh4l\" (UniqueName: \"kubernetes.io/projected/96ac9638-e157-43ba-b12c-96b502226293-kube-api-access-jkh4l\") pod \"neutron-8a45-account-create-update-rvtjj\" (UID: \"96ac9638-e157-43ba-b12c-96b502226293\") " pod="openstack/neutron-8a45-account-create-update-rvtjj" Dec 02 18:57:40 crc kubenswrapper[4792]: I1202 18:57:40.956915 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-26zbd" Dec 02 18:57:41 crc kubenswrapper[4792]: I1202 18:57:41.033196 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-8a45-account-create-update-rvtjj" Dec 02 18:57:41 crc kubenswrapper[4792]: I1202 18:57:41.131377 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-create-h2qp6"] Dec 02 18:57:41 crc kubenswrapper[4792]: W1202 18:57:41.151908 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5ac4b779_c2dd_4da8_a15d_e3d8ad165510.slice/crio-0f792a0240f10b11198f2a2d1d76dedb815978d952e5c42b78e0bb6249a40975 WatchSource:0}: Error finding container 0f792a0240f10b11198f2a2d1d76dedb815978d952e5c42b78e0bb6249a40975: Status 404 returned error can't find the container with id 0f792a0240f10b11198f2a2d1d76dedb815978d952e5c42b78e0bb6249a40975 Dec 02 18:57:41 crc kubenswrapper[4792]: I1202 18:57:41.181634 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-jj9jh"] Dec 02 18:57:41 crc kubenswrapper[4792]: I1202 18:57:41.191497 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-dsqd2"] Dec 02 18:57:41 crc kubenswrapper[4792]: I1202 18:57:41.350919 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-87a8-account-create-update-fbvt2"] Dec 02 18:57:41 crc kubenswrapper[4792]: W1202 18:57:41.361587 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod366ad881_59b1_434f_b500_3cb185421ebe.slice/crio-778d20ec3ec9d2fba91354829d4b72d791b579364e383a894e4536ab8e3d41fe WatchSource:0}: Error finding container 778d20ec3ec9d2fba91354829d4b72d791b579364e383a894e4536ab8e3d41fe: Status 404 returned error can't find the container with id 778d20ec3ec9d2fba91354829d4b72d791b579364e383a894e4536ab8e3d41fe Dec 02 18:57:41 crc kubenswrapper[4792]: I1202 18:57:41.374818 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-ebea-account-create-update-fq5wk"] Dec 02 18:57:41 crc kubenswrapper[4792]: I1202 18:57:41.406101 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-4svmb"] Dec 02 18:57:41 crc kubenswrapper[4792]: I1202 18:57:41.409148 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-dsqd2" event={"ID":"17f2eea9-c362-4195-a8fc-8d392d045f4f","Type":"ContainerStarted","Data":"d1f9423d663b6e88b27e567000497de0316460778ab9ffbfa868db89f6cceba0"} Dec 02 18:57:41 crc kubenswrapper[4792]: I1202 18:57:41.409182 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-dsqd2" event={"ID":"17f2eea9-c362-4195-a8fc-8d392d045f4f","Type":"ContainerStarted","Data":"9b3840e07f296911e6c57890426a723be82a90f242f4420b2282570691067b3a"} Dec 02 18:57:41 crc kubenswrapper[4792]: I1202 18:57:41.420390 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-ebea-account-create-update-fq5wk" event={"ID":"0c65caa0-11c0-4a4d-b58d-cb17efd01928","Type":"ContainerStarted","Data":"5eb5477e8a5c663dd162e07720f1f6c872e75e7a0d4958e4ce9ffb9b64c990f5"} Dec 02 18:57:41 crc kubenswrapper[4792]: I1202 18:57:41.423225 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-87a8-account-create-update-fbvt2" event={"ID":"366ad881-59b1-434f-b500-3cb185421ebe","Type":"ContainerStarted","Data":"778d20ec3ec9d2fba91354829d4b72d791b579364e383a894e4536ab8e3d41fe"} Dec 02 18:57:41 crc kubenswrapper[4792]: I1202 18:57:41.430994 4792 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openstack/cinder-db-create-dsqd2" podStartSLOduration=2.430975163 podStartE2EDuration="2.430975163s" podCreationTimestamp="2025-12-02 18:57:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:57:41.427940035 +0000 UTC m=+1292.200832363" watchObservedRunningTime="2025-12-02 18:57:41.430975163 +0000 UTC m=+1292.203867491" Dec 02 18:57:41 crc kubenswrapper[4792]: I1202 18:57:41.431954 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-jj9jh" event={"ID":"a662d52b-8774-40a9-b965-ca41d5f1c6c4","Type":"ContainerStarted","Data":"6349ae0077f008786051e935f2ccf63875e2164e5bb254f0eb0c137c4ab1287d"} Dec 02 18:57:41 crc kubenswrapper[4792]: I1202 18:57:41.438500 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-create-h2qp6" event={"ID":"5ac4b779-c2dd-4da8-a15d-e3d8ad165510","Type":"ContainerStarted","Data":"5e1cfb116ed3cac0798487d859c3c197e31c43a358a6c47449b7a6cd073e7a0f"} Dec 02 18:57:41 crc kubenswrapper[4792]: I1202 18:57:41.438614 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-create-h2qp6" event={"ID":"5ac4b779-c2dd-4da8-a15d-e3d8ad165510","Type":"ContainerStarted","Data":"0f792a0240f10b11198f2a2d1d76dedb815978d952e5c42b78e0bb6249a40975"} Dec 02 18:57:41 crc kubenswrapper[4792]: I1202 18:57:41.445724 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-4svmb" event={"ID":"39d51baf-c92a-413f-9257-facc87ce7084","Type":"ContainerStarted","Data":"778acc4151a4643df833481ab29846b3d7f4e41085c3698f2203954b32fe344b"} Dec 02 18:57:41 crc kubenswrapper[4792]: I1202 18:57:41.465602 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-db-create-h2qp6" podStartSLOduration=2.46558596 podStartE2EDuration="2.46558596s" podCreationTimestamp="2025-12-02 18:57:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:57:41.462456869 +0000 UTC m=+1292.235349197" watchObservedRunningTime="2025-12-02 18:57:41.46558596 +0000 UTC m=+1292.238478288" Dec 02 18:57:41 crc kubenswrapper[4792]: I1202 18:57:41.528412 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-1802-account-create-update-tfzgc"] Dec 02 18:57:41 crc kubenswrapper[4792]: I1202 18:57:41.641325 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-26zbd"] Dec 02 18:57:41 crc kubenswrapper[4792]: W1202 18:57:41.647061 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6acb750d_2221_421a_af1c_dfe569427350.slice/crio-329e8d9b5dbf5d1b48f7e8599821f3d391a036a0f0548f06f9e9a2a79b927268 WatchSource:0}: Error finding container 329e8d9b5dbf5d1b48f7e8599821f3d391a036a0f0548f06f9e9a2a79b927268: Status 404 returned error can't find the container with id 329e8d9b5dbf5d1b48f7e8599821f3d391a036a0f0548f06f9e9a2a79b927268 Dec 02 18:57:41 crc kubenswrapper[4792]: I1202 18:57:41.822057 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-8a45-account-create-update-rvtjj"] Dec 02 18:57:42 crc kubenswrapper[4792]: W1202 18:57:42.097680 4792 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod96ac9638_e157_43ba_b12c_96b502226293.slice/crio-2eb793d197a61824a6698cb3e9ed0f8b253613548ece3b1464e74635094b015d WatchSource:0}: Error finding container 2eb793d197a61824a6698cb3e9ed0f8b253613548ece3b1464e74635094b015d: Status 404 returned error can't find the container with id 2eb793d197a61824a6698cb3e9ed0f8b253613548ece3b1464e74635094b015d Dec 02 18:57:42 crc kubenswrapper[4792]: I1202 18:57:42.456490 4792 generic.go:334] "Generic (PLEG): container finished" podID="39d51baf-c92a-413f-9257-facc87ce7084" containerID="971f40e79711af1836c9a7d7182b7f0b4e134e768c6f4d8c65b27d9c1bb80c7e" exitCode=0 Dec 02 18:57:42 crc kubenswrapper[4792]: I1202 18:57:42.456551 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-4svmb" event={"ID":"39d51baf-c92a-413f-9257-facc87ce7084","Type":"ContainerDied","Data":"971f40e79711af1836c9a7d7182b7f0b4e134e768c6f4d8c65b27d9c1bb80c7e"} Dec 02 18:57:42 crc kubenswrapper[4792]: I1202 18:57:42.465216 4792 generic.go:334] "Generic (PLEG): container finished" podID="17f2eea9-c362-4195-a8fc-8d392d045f4f" containerID="d1f9423d663b6e88b27e567000497de0316460778ab9ffbfa868db89f6cceba0" exitCode=0 Dec 02 18:57:42 crc kubenswrapper[4792]: I1202 18:57:42.465287 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-dsqd2" event={"ID":"17f2eea9-c362-4195-a8fc-8d392d045f4f","Type":"ContainerDied","Data":"d1f9423d663b6e88b27e567000497de0316460778ab9ffbfa868db89f6cceba0"} Dec 02 18:57:42 crc kubenswrapper[4792]: I1202 18:57:42.467639 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-8a45-account-create-update-rvtjj" event={"ID":"96ac9638-e157-43ba-b12c-96b502226293","Type":"ContainerStarted","Data":"3256698347e69cbc8074b6f33febc449877278b05f18e0bc6b4ee7599aba0d70"} Dec 02 18:57:42 crc kubenswrapper[4792]: I1202 18:57:42.467683 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-8a45-account-create-update-rvtjj" event={"ID":"96ac9638-e157-43ba-b12c-96b502226293","Type":"ContainerStarted","Data":"2eb793d197a61824a6698cb3e9ed0f8b253613548ece3b1464e74635094b015d"} Dec 02 18:57:42 crc kubenswrapper[4792]: I1202 18:57:42.470586 4792 generic.go:334] "Generic (PLEG): container finished" podID="0c65caa0-11c0-4a4d-b58d-cb17efd01928" containerID="06d4b02acfe1120ad569c8d49725276df8e5f5458989c1b57b84bade5c8f3d65" exitCode=0 Dec 02 18:57:42 crc kubenswrapper[4792]: I1202 18:57:42.470643 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-ebea-account-create-update-fq5wk" event={"ID":"0c65caa0-11c0-4a4d-b58d-cb17efd01928","Type":"ContainerDied","Data":"06d4b02acfe1120ad569c8d49725276df8e5f5458989c1b57b84bade5c8f3d65"} Dec 02 18:57:42 crc kubenswrapper[4792]: I1202 18:57:42.474236 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"252fab2e-fcb7-43e8-940a-48adc8f4ebd5","Type":"ContainerStarted","Data":"7e170a8796e0ddc877e7ece50ffbc2c6b6a4a5e4eb4be3aa08096da1ffc6834a"} Dec 02 18:57:42 crc kubenswrapper[4792]: I1202 18:57:42.478334 4792 generic.go:334] "Generic (PLEG): container finished" podID="366ad881-59b1-434f-b500-3cb185421ebe" containerID="88e22d4562a0ec990d2aca3a63183106a1dda487d925ddf9ef0a75c735e1e6a6" exitCode=0 Dec 02 18:57:42 crc kubenswrapper[4792]: I1202 18:57:42.478414 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-87a8-account-create-update-fbvt2" 
event={"ID":"366ad881-59b1-434f-b500-3cb185421ebe","Type":"ContainerDied","Data":"88e22d4562a0ec990d2aca3a63183106a1dda487d925ddf9ef0a75c735e1e6a6"} Dec 02 18:57:42 crc kubenswrapper[4792]: I1202 18:57:42.480773 4792 generic.go:334] "Generic (PLEG): container finished" podID="5ac4b779-c2dd-4da8-a15d-e3d8ad165510" containerID="5e1cfb116ed3cac0798487d859c3c197e31c43a358a6c47449b7a6cd073e7a0f" exitCode=0 Dec 02 18:57:42 crc kubenswrapper[4792]: I1202 18:57:42.480833 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-create-h2qp6" event={"ID":"5ac4b779-c2dd-4da8-a15d-e3d8ad165510","Type":"ContainerDied","Data":"5e1cfb116ed3cac0798487d859c3c197e31c43a358a6c47449b7a6cd073e7a0f"} Dec 02 18:57:42 crc kubenswrapper[4792]: I1202 18:57:42.482888 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-26zbd" event={"ID":"6acb750d-2221-421a-af1c-dfe569427350","Type":"ContainerStarted","Data":"a57f2be1b93544777aba8e2b1c6ff1a1ab422cb15451bc5e1c51aabbac78894d"} Dec 02 18:57:42 crc kubenswrapper[4792]: I1202 18:57:42.482920 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-26zbd" event={"ID":"6acb750d-2221-421a-af1c-dfe569427350","Type":"ContainerStarted","Data":"329e8d9b5dbf5d1b48f7e8599821f3d391a036a0f0548f06f9e9a2a79b927268"} Dec 02 18:57:42 crc kubenswrapper[4792]: I1202 18:57:42.490178 4792 generic.go:334] "Generic (PLEG): container finished" podID="f80d3801-ef59-4680-9736-dc6d78d1b7d8" containerID="e13249fc8d8ebedf700437f85ac25bdd37d4993ffeddb7cb15cb6969849a90a3" exitCode=0 Dec 02 18:57:42 crc kubenswrapper[4792]: I1202 18:57:42.490216 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-1802-account-create-update-tfzgc" event={"ID":"f80d3801-ef59-4680-9736-dc6d78d1b7d8","Type":"ContainerDied","Data":"e13249fc8d8ebedf700437f85ac25bdd37d4993ffeddb7cb15cb6969849a90a3"} Dec 02 18:57:42 crc kubenswrapper[4792]: I1202 18:57:42.490235 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-1802-account-create-update-tfzgc" event={"ID":"f80d3801-ef59-4680-9736-dc6d78d1b7d8","Type":"ContainerStarted","Data":"dac96c0ad8c8b4b4e2d6a514303a256f51227ab2b3e6ec91286c8d39fb895b5f"} Dec 02 18:57:42 crc kubenswrapper[4792]: I1202 18:57:42.496842 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-8a45-account-create-update-rvtjj" podStartSLOduration=2.496824382 podStartE2EDuration="2.496824382s" podCreationTimestamp="2025-12-02 18:57:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:57:42.488874586 +0000 UTC m=+1293.261766914" watchObservedRunningTime="2025-12-02 18:57:42.496824382 +0000 UTC m=+1293.269716710" Dec 02 18:57:42 crc kubenswrapper[4792]: I1202 18:57:42.570890 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-create-26zbd" podStartSLOduration=2.57086889 podStartE2EDuration="2.57086889s" podCreationTimestamp="2025-12-02 18:57:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:57:42.563192572 +0000 UTC m=+1293.336084890" watchObservedRunningTime="2025-12-02 18:57:42.57086889 +0000 UTC m=+1293.343761218" Dec 02 18:57:43 crc kubenswrapper[4792]: I1202 18:57:43.500926 4792 generic.go:334] "Generic (PLEG): container finished" podID="96ac9638-e157-43ba-b12c-96b502226293" 
containerID="3256698347e69cbc8074b6f33febc449877278b05f18e0bc6b4ee7599aba0d70" exitCode=0 Dec 02 18:57:43 crc kubenswrapper[4792]: I1202 18:57:43.501300 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-8a45-account-create-update-rvtjj" event={"ID":"96ac9638-e157-43ba-b12c-96b502226293","Type":"ContainerDied","Data":"3256698347e69cbc8074b6f33febc449877278b05f18e0bc6b4ee7599aba0d70"} Dec 02 18:57:43 crc kubenswrapper[4792]: I1202 18:57:43.508881 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"252fab2e-fcb7-43e8-940a-48adc8f4ebd5","Type":"ContainerStarted","Data":"28fb2b99c3a920d508df365bd0040b88ad890bf9fb72f1424e2c8535579589f2"} Dec 02 18:57:43 crc kubenswrapper[4792]: I1202 18:57:43.508923 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"252fab2e-fcb7-43e8-940a-48adc8f4ebd5","Type":"ContainerStarted","Data":"e31f5adfd60704a6619251433158d9874aa9a0e7f20c3787630a9b405c59621c"} Dec 02 18:57:43 crc kubenswrapper[4792]: I1202 18:57:43.508935 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"252fab2e-fcb7-43e8-940a-48adc8f4ebd5","Type":"ContainerStarted","Data":"994fee9d6758993245b292af12a1d26bc2cc0ac85cf406622afe8f5001a7fe8c"} Dec 02 18:57:43 crc kubenswrapper[4792]: I1202 18:57:43.508966 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"252fab2e-fcb7-43e8-940a-48adc8f4ebd5","Type":"ContainerStarted","Data":"8185eed4b67edd7a3357f7093778bca69e84a4be3f4a510f4b57420734f88d39"} Dec 02 18:57:43 crc kubenswrapper[4792]: I1202 18:57:43.508980 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"252fab2e-fcb7-43e8-940a-48adc8f4ebd5","Type":"ContainerStarted","Data":"ba19873c8553d9caf13018a713e494a77db370c62f3e0399bb045f3a8610f296"} Dec 02 18:57:43 crc kubenswrapper[4792]: I1202 18:57:43.511121 4792 generic.go:334] "Generic (PLEG): container finished" podID="6acb750d-2221-421a-af1c-dfe569427350" containerID="a57f2be1b93544777aba8e2b1c6ff1a1ab422cb15451bc5e1c51aabbac78894d" exitCode=0 Dec 02 18:57:43 crc kubenswrapper[4792]: I1202 18:57:43.511199 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-26zbd" event={"ID":"6acb750d-2221-421a-af1c-dfe569427350","Type":"ContainerDied","Data":"a57f2be1b93544777aba8e2b1c6ff1a1ab422cb15451bc5e1c51aabbac78894d"} Dec 02 18:57:43 crc kubenswrapper[4792]: I1202 18:57:43.523296 4792 generic.go:334] "Generic (PLEG): container finished" podID="98cbdc65-cc24-4a81-899b-66de1d1a6ca3" containerID="adc67c19fa767df57299a569e428f3fbccf0407769242da70dc7bcb7f38acb78" exitCode=0 Dec 02 18:57:43 crc kubenswrapper[4792]: I1202 18:57:43.523419 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-wh2n2" event={"ID":"98cbdc65-cc24-4a81-899b-66de1d1a6ca3","Type":"ContainerDied","Data":"adc67c19fa767df57299a569e428f3fbccf0407769242da70dc7bcb7f38acb78"} Dec 02 18:57:45 crc kubenswrapper[4792]: I1202 18:57:45.548154 4792 generic.go:334] "Generic (PLEG): container finished" podID="5d36d473-c89a-496a-ab27-d65535edb0ec" containerID="9c494caf08d971d6546a689f20c025202fd3dbd21e70a06a9109a25bef1e68a2" exitCode=0 Dec 02 18:57:45 crc kubenswrapper[4792]: I1202 18:57:45.550618 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" 
event={"ID":"5d36d473-c89a-496a-ab27-d65535edb0ec","Type":"ContainerDied","Data":"9c494caf08d971d6546a689f20c025202fd3dbd21e70a06a9109a25bef1e68a2"} Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.462330 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-dsqd2" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.557133 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-wh2n2" event={"ID":"98cbdc65-cc24-4a81-899b-66de1d1a6ca3","Type":"ContainerDied","Data":"a6b26d5ae4fe9cd5c069c642eec76460db5b08c05a5250ec3b2c3788e8b1b336"} Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.557173 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a6b26d5ae4fe9cd5c069c642eec76460db5b08c05a5250ec3b2c3788e8b1b336" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.558119 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-8a45-account-create-update-rvtjj" event={"ID":"96ac9638-e157-43ba-b12c-96b502226293","Type":"ContainerDied","Data":"2eb793d197a61824a6698cb3e9ed0f8b253613548ece3b1464e74635094b015d"} Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.558141 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2eb793d197a61824a6698cb3e9ed0f8b253613548ece3b1464e74635094b015d" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.559411 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-ebea-account-create-update-fq5wk" event={"ID":"0c65caa0-11c0-4a4d-b58d-cb17efd01928","Type":"ContainerDied","Data":"5eb5477e8a5c663dd162e07720f1f6c872e75e7a0d4958e4ce9ffb9b64c990f5"} Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.559430 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5eb5477e8a5c663dd162e07720f1f6c872e75e7a0d4958e4ce9ffb9b64c990f5" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.560509 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-create-h2qp6" event={"ID":"5ac4b779-c2dd-4da8-a15d-e3d8ad165510","Type":"ContainerDied","Data":"0f792a0240f10b11198f2a2d1d76dedb815978d952e5c42b78e0bb6249a40975"} Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.560550 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0f792a0240f10b11198f2a2d1d76dedb815978d952e5c42b78e0bb6249a40975" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.561552 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-dsqd2" event={"ID":"17f2eea9-c362-4195-a8fc-8d392d045f4f","Type":"ContainerDied","Data":"9b3840e07f296911e6c57890426a723be82a90f242f4420b2282570691067b3a"} Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.561572 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9b3840e07f296911e6c57890426a723be82a90f242f4420b2282570691067b3a" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.561622 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-dsqd2" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.563335 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-26zbd" event={"ID":"6acb750d-2221-421a-af1c-dfe569427350","Type":"ContainerDied","Data":"329e8d9b5dbf5d1b48f7e8599821f3d391a036a0f0548f06f9e9a2a79b927268"} Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.563355 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="329e8d9b5dbf5d1b48f7e8599821f3d391a036a0f0548f06f9e9a2a79b927268" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.564474 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-4svmb" event={"ID":"39d51baf-c92a-413f-9257-facc87ce7084","Type":"ContainerDied","Data":"778acc4151a4643df833481ab29846b3d7f4e41085c3698f2203954b32fe344b"} Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.564494 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="778acc4151a4643df833481ab29846b3d7f4e41085c3698f2203954b32fe344b" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.565530 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-1802-account-create-update-tfzgc" event={"ID":"f80d3801-ef59-4680-9736-dc6d78d1b7d8","Type":"ContainerDied","Data":"dac96c0ad8c8b4b4e2d6a514303a256f51227ab2b3e6ec91286c8d39fb895b5f"} Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.565549 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dac96c0ad8c8b4b4e2d6a514303a256f51227ab2b3e6ec91286c8d39fb895b5f" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.566600 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-87a8-account-create-update-fbvt2" event={"ID":"366ad881-59b1-434f-b500-3cb185421ebe","Type":"ContainerDied","Data":"778d20ec3ec9d2fba91354829d4b72d791b579364e383a894e4536ab8e3d41fe"} Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.566619 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="778d20ec3ec9d2fba91354829d4b72d791b579364e383a894e4536ab8e3d41fe" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.587742 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/17f2eea9-c362-4195-a8fc-8d392d045f4f-operator-scripts\") pod \"17f2eea9-c362-4195-a8fc-8d392d045f4f\" (UID: \"17f2eea9-c362-4195-a8fc-8d392d045f4f\") " Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.587859 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bmdcb\" (UniqueName: \"kubernetes.io/projected/17f2eea9-c362-4195-a8fc-8d392d045f4f-kube-api-access-bmdcb\") pod \"17f2eea9-c362-4195-a8fc-8d392d045f4f\" (UID: \"17f2eea9-c362-4195-a8fc-8d392d045f4f\") " Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.588671 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/17f2eea9-c362-4195-a8fc-8d392d045f4f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "17f2eea9-c362-4195-a8fc-8d392d045f4f" (UID: "17f2eea9-c362-4195-a8fc-8d392d045f4f"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.593397 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17f2eea9-c362-4195-a8fc-8d392d045f4f-kube-api-access-bmdcb" (OuterVolumeSpecName: "kube-api-access-bmdcb") pod "17f2eea9-c362-4195-a8fc-8d392d045f4f" (UID: "17f2eea9-c362-4195-a8fc-8d392d045f4f"). InnerVolumeSpecName "kube-api-access-bmdcb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.683939 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-26zbd" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.692424 4792 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/17f2eea9-c362-4195-a8fc-8d392d045f4f-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.692464 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bmdcb\" (UniqueName: \"kubernetes.io/projected/17f2eea9-c362-4195-a8fc-8d392d045f4f-kube-api-access-bmdcb\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.724318 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-create-h2qp6" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.735180 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-4svmb" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.744914 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-1802-account-create-update-tfzgc" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.754803 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-wh2n2" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.766634 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-8a45-account-create-update-rvtjj" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.779054 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-ebea-account-create-update-fq5wk" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.789894 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-87a8-account-create-update-fbvt2" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.793273 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bcq4d\" (UniqueName: \"kubernetes.io/projected/6acb750d-2221-421a-af1c-dfe569427350-kube-api-access-bcq4d\") pod \"6acb750d-2221-421a-af1c-dfe569427350\" (UID: \"6acb750d-2221-421a-af1c-dfe569427350\") " Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.793356 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6acb750d-2221-421a-af1c-dfe569427350-operator-scripts\") pod \"6acb750d-2221-421a-af1c-dfe569427350\" (UID: \"6acb750d-2221-421a-af1c-dfe569427350\") " Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.794895 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6acb750d-2221-421a-af1c-dfe569427350-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6acb750d-2221-421a-af1c-dfe569427350" (UID: "6acb750d-2221-421a-af1c-dfe569427350"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.797034 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6acb750d-2221-421a-af1c-dfe569427350-kube-api-access-bcq4d" (OuterVolumeSpecName: "kube-api-access-bcq4d") pod "6acb750d-2221-421a-af1c-dfe569427350" (UID: "6acb750d-2221-421a-af1c-dfe569427350"). InnerVolumeSpecName "kube-api-access-bcq4d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.894941 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jgcls\" (UniqueName: \"kubernetes.io/projected/366ad881-59b1-434f-b500-3cb185421ebe-kube-api-access-jgcls\") pod \"366ad881-59b1-434f-b500-3cb185421ebe\" (UID: \"366ad881-59b1-434f-b500-3cb185421ebe\") " Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.894986 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ltfvk\" (UniqueName: \"kubernetes.io/projected/98cbdc65-cc24-4a81-899b-66de1d1a6ca3-kube-api-access-ltfvk\") pod \"98cbdc65-cc24-4a81-899b-66de1d1a6ca3\" (UID: \"98cbdc65-cc24-4a81-899b-66de1d1a6ca3\") " Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.895013 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5ac4b779-c2dd-4da8-a15d-e3d8ad165510-operator-scripts\") pod \"5ac4b779-c2dd-4da8-a15d-e3d8ad165510\" (UID: \"5ac4b779-c2dd-4da8-a15d-e3d8ad165510\") " Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.895042 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkh4l\" (UniqueName: \"kubernetes.io/projected/96ac9638-e157-43ba-b12c-96b502226293-kube-api-access-jkh4l\") pod \"96ac9638-e157-43ba-b12c-96b502226293\" (UID: \"96ac9638-e157-43ba-b12c-96b502226293\") " Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.895095 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zqddp\" (UniqueName: \"kubernetes.io/projected/39d51baf-c92a-413f-9257-facc87ce7084-kube-api-access-zqddp\") pod \"39d51baf-c92a-413f-9257-facc87ce7084\" (UID: \"39d51baf-c92a-413f-9257-facc87ce7084\") " Dec 02 
18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.895132 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98cbdc65-cc24-4a81-899b-66de1d1a6ca3-combined-ca-bundle\") pod \"98cbdc65-cc24-4a81-899b-66de1d1a6ca3\" (UID: \"98cbdc65-cc24-4a81-899b-66de1d1a6ca3\") " Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.895183 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/39d51baf-c92a-413f-9257-facc87ce7084-operator-scripts\") pod \"39d51baf-c92a-413f-9257-facc87ce7084\" (UID: \"39d51baf-c92a-413f-9257-facc87ce7084\") " Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.895201 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96ac9638-e157-43ba-b12c-96b502226293-operator-scripts\") pod \"96ac9638-e157-43ba-b12c-96b502226293\" (UID: \"96ac9638-e157-43ba-b12c-96b502226293\") " Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.895239 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f80d3801-ef59-4680-9736-dc6d78d1b7d8-operator-scripts\") pod \"f80d3801-ef59-4680-9736-dc6d78d1b7d8\" (UID: \"f80d3801-ef59-4680-9736-dc6d78d1b7d8\") " Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.895277 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98cbdc65-cc24-4a81-899b-66de1d1a6ca3-config-data\") pod \"98cbdc65-cc24-4a81-899b-66de1d1a6ca3\" (UID: \"98cbdc65-cc24-4a81-899b-66de1d1a6ca3\") " Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.895321 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v79l8\" (UniqueName: \"kubernetes.io/projected/0c65caa0-11c0-4a4d-b58d-cb17efd01928-kube-api-access-v79l8\") pod \"0c65caa0-11c0-4a4d-b58d-cb17efd01928\" (UID: \"0c65caa0-11c0-4a4d-b58d-cb17efd01928\") " Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.895337 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fwntc\" (UniqueName: \"kubernetes.io/projected/f80d3801-ef59-4680-9736-dc6d78d1b7d8-kube-api-access-fwntc\") pod \"f80d3801-ef59-4680-9736-dc6d78d1b7d8\" (UID: \"f80d3801-ef59-4680-9736-dc6d78d1b7d8\") " Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.895364 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-244xq\" (UniqueName: \"kubernetes.io/projected/5ac4b779-c2dd-4da8-a15d-e3d8ad165510-kube-api-access-244xq\") pod \"5ac4b779-c2dd-4da8-a15d-e3d8ad165510\" (UID: \"5ac4b779-c2dd-4da8-a15d-e3d8ad165510\") " Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.895398 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/366ad881-59b1-434f-b500-3cb185421ebe-operator-scripts\") pod \"366ad881-59b1-434f-b500-3cb185421ebe\" (UID: \"366ad881-59b1-434f-b500-3cb185421ebe\") " Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.895415 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/98cbdc65-cc24-4a81-899b-66de1d1a6ca3-db-sync-config-data\") pod \"98cbdc65-cc24-4a81-899b-66de1d1a6ca3\" (UID: 
\"98cbdc65-cc24-4a81-899b-66de1d1a6ca3\") " Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.895463 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0c65caa0-11c0-4a4d-b58d-cb17efd01928-operator-scripts\") pod \"0c65caa0-11c0-4a4d-b58d-cb17efd01928\" (UID: \"0c65caa0-11c0-4a4d-b58d-cb17efd01928\") " Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.895776 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/39d51baf-c92a-413f-9257-facc87ce7084-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "39d51baf-c92a-413f-9257-facc87ce7084" (UID: "39d51baf-c92a-413f-9257-facc87ce7084"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.895827 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bcq4d\" (UniqueName: \"kubernetes.io/projected/6acb750d-2221-421a-af1c-dfe569427350-kube-api-access-bcq4d\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.895843 4792 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6acb750d-2221-421a-af1c-dfe569427350-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.896240 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5ac4b779-c2dd-4da8-a15d-e3d8ad165510-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5ac4b779-c2dd-4da8-a15d-e3d8ad165510" (UID: "5ac4b779-c2dd-4da8-a15d-e3d8ad165510"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.896479 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0c65caa0-11c0-4a4d-b58d-cb17efd01928-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0c65caa0-11c0-4a4d-b58d-cb17efd01928" (UID: "0c65caa0-11c0-4a4d-b58d-cb17efd01928"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.896914 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/96ac9638-e157-43ba-b12c-96b502226293-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "96ac9638-e157-43ba-b12c-96b502226293" (UID: "96ac9638-e157-43ba-b12c-96b502226293"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.897482 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/366ad881-59b1-434f-b500-3cb185421ebe-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "366ad881-59b1-434f-b500-3cb185421ebe" (UID: "366ad881-59b1-434f-b500-3cb185421ebe"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.899141 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f80d3801-ef59-4680-9736-dc6d78d1b7d8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f80d3801-ef59-4680-9736-dc6d78d1b7d8" (UID: "f80d3801-ef59-4680-9736-dc6d78d1b7d8"). 
InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.904786 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/366ad881-59b1-434f-b500-3cb185421ebe-kube-api-access-jgcls" (OuterVolumeSpecName: "kube-api-access-jgcls") pod "366ad881-59b1-434f-b500-3cb185421ebe" (UID: "366ad881-59b1-434f-b500-3cb185421ebe"). InnerVolumeSpecName "kube-api-access-jgcls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.904963 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f80d3801-ef59-4680-9736-dc6d78d1b7d8-kube-api-access-fwntc" (OuterVolumeSpecName: "kube-api-access-fwntc") pod "f80d3801-ef59-4680-9736-dc6d78d1b7d8" (UID: "f80d3801-ef59-4680-9736-dc6d78d1b7d8"). InnerVolumeSpecName "kube-api-access-fwntc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.909146 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0c65caa0-11c0-4a4d-b58d-cb17efd01928-kube-api-access-v79l8" (OuterVolumeSpecName: "kube-api-access-v79l8") pod "0c65caa0-11c0-4a4d-b58d-cb17efd01928" (UID: "0c65caa0-11c0-4a4d-b58d-cb17efd01928"). InnerVolumeSpecName "kube-api-access-v79l8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.910589 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96ac9638-e157-43ba-b12c-96b502226293-kube-api-access-jkh4l" (OuterVolumeSpecName: "kube-api-access-jkh4l") pod "96ac9638-e157-43ba-b12c-96b502226293" (UID: "96ac9638-e157-43ba-b12c-96b502226293"). InnerVolumeSpecName "kube-api-access-jkh4l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.916240 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ac4b779-c2dd-4da8-a15d-e3d8ad165510-kube-api-access-244xq" (OuterVolumeSpecName: "kube-api-access-244xq") pod "5ac4b779-c2dd-4da8-a15d-e3d8ad165510" (UID: "5ac4b779-c2dd-4da8-a15d-e3d8ad165510"). InnerVolumeSpecName "kube-api-access-244xq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.916315 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98cbdc65-cc24-4a81-899b-66de1d1a6ca3-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "98cbdc65-cc24-4a81-899b-66de1d1a6ca3" (UID: "98cbdc65-cc24-4a81-899b-66de1d1a6ca3"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.916671 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/98cbdc65-cc24-4a81-899b-66de1d1a6ca3-kube-api-access-ltfvk" (OuterVolumeSpecName: "kube-api-access-ltfvk") pod "98cbdc65-cc24-4a81-899b-66de1d1a6ca3" (UID: "98cbdc65-cc24-4a81-899b-66de1d1a6ca3"). InnerVolumeSpecName "kube-api-access-ltfvk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.916990 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39d51baf-c92a-413f-9257-facc87ce7084-kube-api-access-zqddp" (OuterVolumeSpecName: "kube-api-access-zqddp") pod "39d51baf-c92a-413f-9257-facc87ce7084" (UID: "39d51baf-c92a-413f-9257-facc87ce7084"). InnerVolumeSpecName "kube-api-access-zqddp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.932304 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98cbdc65-cc24-4a81-899b-66de1d1a6ca3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "98cbdc65-cc24-4a81-899b-66de1d1a6ca3" (UID: "98cbdc65-cc24-4a81-899b-66de1d1a6ca3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.957701 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98cbdc65-cc24-4a81-899b-66de1d1a6ca3-config-data" (OuterVolumeSpecName: "config-data") pod "98cbdc65-cc24-4a81-899b-66de1d1a6ca3" (UID: "98cbdc65-cc24-4a81-899b-66de1d1a6ca3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.998118 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-244xq\" (UniqueName: \"kubernetes.io/projected/5ac4b779-c2dd-4da8-a15d-e3d8ad165510-kube-api-access-244xq\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.998188 4792 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/366ad881-59b1-434f-b500-3cb185421ebe-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.998211 4792 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/98cbdc65-cc24-4a81-899b-66de1d1a6ca3-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.998231 4792 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0c65caa0-11c0-4a4d-b58d-cb17efd01928-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.998256 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jgcls\" (UniqueName: \"kubernetes.io/projected/366ad881-59b1-434f-b500-3cb185421ebe-kube-api-access-jgcls\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.998279 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ltfvk\" (UniqueName: \"kubernetes.io/projected/98cbdc65-cc24-4a81-899b-66de1d1a6ca3-kube-api-access-ltfvk\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.998301 4792 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5ac4b779-c2dd-4da8-a15d-e3d8ad165510-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.998324 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkh4l\" (UniqueName: 
\"kubernetes.io/projected/96ac9638-e157-43ba-b12c-96b502226293-kube-api-access-jkh4l\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.998345 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zqddp\" (UniqueName: \"kubernetes.io/projected/39d51baf-c92a-413f-9257-facc87ce7084-kube-api-access-zqddp\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.998366 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98cbdc65-cc24-4a81-899b-66de1d1a6ca3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.998387 4792 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/39d51baf-c92a-413f-9257-facc87ce7084-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.998409 4792 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96ac9638-e157-43ba-b12c-96b502226293-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.998431 4792 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f80d3801-ef59-4680-9736-dc6d78d1b7d8-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.998455 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98cbdc65-cc24-4a81-899b-66de1d1a6ca3-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.998478 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v79l8\" (UniqueName: \"kubernetes.io/projected/0c65caa0-11c0-4a4d-b58d-cb17efd01928-kube-api-access-v79l8\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:46 crc kubenswrapper[4792]: I1202 18:57:46.998500 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fwntc\" (UniqueName: \"kubernetes.io/projected/f80d3801-ef59-4680-9736-dc6d78d1b7d8-kube-api-access-fwntc\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:47 crc kubenswrapper[4792]: I1202 18:57:47.584916 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"252fab2e-fcb7-43e8-940a-48adc8f4ebd5","Type":"ContainerStarted","Data":"c4cd7f36ebf432cdcf1463aa226ea89d1a8ef0955cb6135bc2be68eecc9b3983"} Dec 02 18:57:47 crc kubenswrapper[4792]: I1202 18:57:47.588361 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"5d36d473-c89a-496a-ab27-d65535edb0ec","Type":"ContainerStarted","Data":"991a5aafabb04bbe66bc9254a02891f9a44d4849bc160933eaa493a1cbc2bfca"} Dec 02 18:57:47 crc kubenswrapper[4792]: I1202 18:57:47.591408 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-1802-account-create-update-tfzgc" Dec 02 18:57:47 crc kubenswrapper[4792]: I1202 18:57:47.591408 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-wh2n2" Dec 02 18:57:47 crc kubenswrapper[4792]: I1202 18:57:47.591464 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-jj9jh" event={"ID":"a662d52b-8774-40a9-b965-ca41d5f1c6c4","Type":"ContainerStarted","Data":"2b93d2eca1ab24fabcc6fd23b34db4f46d3f9d28d326481b9014b1ca3d52056c"} Dec 02 18:57:47 crc kubenswrapper[4792]: I1202 18:57:47.591554 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-26zbd" Dec 02 18:57:47 crc kubenswrapper[4792]: I1202 18:57:47.591588 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-create-h2qp6" Dec 02 18:57:47 crc kubenswrapper[4792]: I1202 18:57:47.591982 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-8a45-account-create-update-rvtjj" Dec 02 18:57:47 crc kubenswrapper[4792]: I1202 18:57:47.593285 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-87a8-account-create-update-fbvt2" Dec 02 18:57:47 crc kubenswrapper[4792]: I1202 18:57:47.593318 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-4svmb" Dec 02 18:57:47 crc kubenswrapper[4792]: I1202 18:57:47.593296 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-ebea-account-create-update-fq5wk" Dec 02 18:57:47 crc kubenswrapper[4792]: I1202 18:57:47.657343 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=45.854439451 podStartE2EDuration="53.657321449s" podCreationTimestamp="2025-12-02 18:56:54 +0000 UTC" firstStartedPulling="2025-12-02 18:57:34.326154224 +0000 UTC m=+1285.099046552" lastFinishedPulling="2025-12-02 18:57:42.129036222 +0000 UTC m=+1292.901928550" observedRunningTime="2025-12-02 18:57:47.633495132 +0000 UTC m=+1298.406387490" watchObservedRunningTime="2025-12-02 18:57:47.657321449 +0000 UTC m=+1298.430213777" Dec 02 18:57:47 crc kubenswrapper[4792]: I1202 18:57:47.660917 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-jj9jh" podStartSLOduration=3.5100500439999998 podStartE2EDuration="8.660902182s" podCreationTimestamp="2025-12-02 18:57:39 +0000 UTC" firstStartedPulling="2025-12-02 18:57:41.199160007 +0000 UTC m=+1291.972052335" lastFinishedPulling="2025-12-02 18:57:46.350012145 +0000 UTC m=+1297.122904473" observedRunningTime="2025-12-02 18:57:47.653786658 +0000 UTC m=+1298.426678996" watchObservedRunningTime="2025-12-02 18:57:47.660902182 +0000 UTC m=+1298.433794510" Dec 02 18:57:47 crc kubenswrapper[4792]: I1202 18:57:47.961351 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-9fjng"] Dec 02 18:57:47 crc kubenswrapper[4792]: E1202 18:57:47.961717 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c65caa0-11c0-4a4d-b58d-cb17efd01928" containerName="mariadb-account-create-update" Dec 02 18:57:47 crc kubenswrapper[4792]: I1202 18:57:47.961733 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c65caa0-11c0-4a4d-b58d-cb17efd01928" containerName="mariadb-account-create-update" Dec 02 18:57:47 crc kubenswrapper[4792]: E1202 18:57:47.961746 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98cbdc65-cc24-4a81-899b-66de1d1a6ca3" 
containerName="glance-db-sync" Dec 02 18:57:47 crc kubenswrapper[4792]: I1202 18:57:47.961753 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="98cbdc65-cc24-4a81-899b-66de1d1a6ca3" containerName="glance-db-sync" Dec 02 18:57:47 crc kubenswrapper[4792]: E1202 18:57:47.961767 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f80d3801-ef59-4680-9736-dc6d78d1b7d8" containerName="mariadb-account-create-update" Dec 02 18:57:47 crc kubenswrapper[4792]: I1202 18:57:47.961774 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="f80d3801-ef59-4680-9736-dc6d78d1b7d8" containerName="mariadb-account-create-update" Dec 02 18:57:47 crc kubenswrapper[4792]: E1202 18:57:47.961783 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ac4b779-c2dd-4da8-a15d-e3d8ad165510" containerName="mariadb-database-create" Dec 02 18:57:47 crc kubenswrapper[4792]: I1202 18:57:47.961789 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ac4b779-c2dd-4da8-a15d-e3d8ad165510" containerName="mariadb-database-create" Dec 02 18:57:47 crc kubenswrapper[4792]: E1202 18:57:47.961803 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="366ad881-59b1-434f-b500-3cb185421ebe" containerName="mariadb-account-create-update" Dec 02 18:57:47 crc kubenswrapper[4792]: I1202 18:57:47.961808 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="366ad881-59b1-434f-b500-3cb185421ebe" containerName="mariadb-account-create-update" Dec 02 18:57:47 crc kubenswrapper[4792]: E1202 18:57:47.961818 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39d51baf-c92a-413f-9257-facc87ce7084" containerName="mariadb-database-create" Dec 02 18:57:47 crc kubenswrapper[4792]: I1202 18:57:47.961824 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="39d51baf-c92a-413f-9257-facc87ce7084" containerName="mariadb-database-create" Dec 02 18:57:47 crc kubenswrapper[4792]: E1202 18:57:47.961841 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6acb750d-2221-421a-af1c-dfe569427350" containerName="mariadb-database-create" Dec 02 18:57:47 crc kubenswrapper[4792]: I1202 18:57:47.961847 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="6acb750d-2221-421a-af1c-dfe569427350" containerName="mariadb-database-create" Dec 02 18:57:47 crc kubenswrapper[4792]: E1202 18:57:47.961858 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96ac9638-e157-43ba-b12c-96b502226293" containerName="mariadb-account-create-update" Dec 02 18:57:47 crc kubenswrapper[4792]: I1202 18:57:47.961864 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="96ac9638-e157-43ba-b12c-96b502226293" containerName="mariadb-account-create-update" Dec 02 18:57:47 crc kubenswrapper[4792]: E1202 18:57:47.961875 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17f2eea9-c362-4195-a8fc-8d392d045f4f" containerName="mariadb-database-create" Dec 02 18:57:47 crc kubenswrapper[4792]: I1202 18:57:47.961881 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="17f2eea9-c362-4195-a8fc-8d392d045f4f" containerName="mariadb-database-create" Dec 02 18:57:47 crc kubenswrapper[4792]: I1202 18:57:47.962029 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="96ac9638-e157-43ba-b12c-96b502226293" containerName="mariadb-account-create-update" Dec 02 18:57:47 crc kubenswrapper[4792]: I1202 18:57:47.962045 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="366ad881-59b1-434f-b500-3cb185421ebe" 
containerName="mariadb-account-create-update" Dec 02 18:57:47 crc kubenswrapper[4792]: I1202 18:57:47.962059 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c65caa0-11c0-4a4d-b58d-cb17efd01928" containerName="mariadb-account-create-update" Dec 02 18:57:47 crc kubenswrapper[4792]: I1202 18:57:47.962072 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="39d51baf-c92a-413f-9257-facc87ce7084" containerName="mariadb-database-create" Dec 02 18:57:47 crc kubenswrapper[4792]: I1202 18:57:47.962081 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="6acb750d-2221-421a-af1c-dfe569427350" containerName="mariadb-database-create" Dec 02 18:57:47 crc kubenswrapper[4792]: I1202 18:57:47.962092 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="f80d3801-ef59-4680-9736-dc6d78d1b7d8" containerName="mariadb-account-create-update" Dec 02 18:57:47 crc kubenswrapper[4792]: I1202 18:57:47.962103 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="17f2eea9-c362-4195-a8fc-8d392d045f4f" containerName="mariadb-database-create" Dec 02 18:57:47 crc kubenswrapper[4792]: I1202 18:57:47.962111 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="98cbdc65-cc24-4a81-899b-66de1d1a6ca3" containerName="glance-db-sync" Dec 02 18:57:47 crc kubenswrapper[4792]: I1202 18:57:47.962119 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ac4b779-c2dd-4da8-a15d-e3d8ad165510" containerName="mariadb-database-create" Dec 02 18:57:47 crc kubenswrapper[4792]: I1202 18:57:47.963032 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-764c5664d7-9fjng" Dec 02 18:57:47 crc kubenswrapper[4792]: I1202 18:57:47.968191 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Dec 02 18:57:47 crc kubenswrapper[4792]: I1202 18:57:47.971016 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-9fjng"] Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.126929 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bf3c7b08-f505-4d22-a0fa-7225d1a77808-ovsdbserver-nb\") pod \"dnsmasq-dns-764c5664d7-9fjng\" (UID: \"bf3c7b08-f505-4d22-a0fa-7225d1a77808\") " pod="openstack/dnsmasq-dns-764c5664d7-9fjng" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.126999 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bf3c7b08-f505-4d22-a0fa-7225d1a77808-config\") pod \"dnsmasq-dns-764c5664d7-9fjng\" (UID: \"bf3c7b08-f505-4d22-a0fa-7225d1a77808\") " pod="openstack/dnsmasq-dns-764c5664d7-9fjng" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.127041 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bf3c7b08-f505-4d22-a0fa-7225d1a77808-dns-swift-storage-0\") pod \"dnsmasq-dns-764c5664d7-9fjng\" (UID: \"bf3c7b08-f505-4d22-a0fa-7225d1a77808\") " pod="openstack/dnsmasq-dns-764c5664d7-9fjng" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.127062 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zt5rg\" (UniqueName: \"kubernetes.io/projected/bf3c7b08-f505-4d22-a0fa-7225d1a77808-kube-api-access-zt5rg\") pod 
\"dnsmasq-dns-764c5664d7-9fjng\" (UID: \"bf3c7b08-f505-4d22-a0fa-7225d1a77808\") " pod="openstack/dnsmasq-dns-764c5664d7-9fjng" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.127196 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bf3c7b08-f505-4d22-a0fa-7225d1a77808-ovsdbserver-sb\") pod \"dnsmasq-dns-764c5664d7-9fjng\" (UID: \"bf3c7b08-f505-4d22-a0fa-7225d1a77808\") " pod="openstack/dnsmasq-dns-764c5664d7-9fjng" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.127363 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bf3c7b08-f505-4d22-a0fa-7225d1a77808-dns-svc\") pod \"dnsmasq-dns-764c5664d7-9fjng\" (UID: \"bf3c7b08-f505-4d22-a0fa-7225d1a77808\") " pod="openstack/dnsmasq-dns-764c5664d7-9fjng" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.132633 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-9fjng"] Dec 02 18:57:48 crc kubenswrapper[4792]: E1202 18:57:48.133266 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config dns-svc dns-swift-storage-0 kube-api-access-zt5rg ovsdbserver-nb ovsdbserver-sb], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/dnsmasq-dns-764c5664d7-9fjng" podUID="bf3c7b08-f505-4d22-a0fa-7225d1a77808" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.175389 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-74f6bcbc87-7f9pb"] Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.177507 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74f6bcbc87-7f9pb" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.187316 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74f6bcbc87-7f9pb"] Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.229190 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bf3c7b08-f505-4d22-a0fa-7225d1a77808-ovsdbserver-nb\") pod \"dnsmasq-dns-764c5664d7-9fjng\" (UID: \"bf3c7b08-f505-4d22-a0fa-7225d1a77808\") " pod="openstack/dnsmasq-dns-764c5664d7-9fjng" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.229261 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bf3c7b08-f505-4d22-a0fa-7225d1a77808-config\") pod \"dnsmasq-dns-764c5664d7-9fjng\" (UID: \"bf3c7b08-f505-4d22-a0fa-7225d1a77808\") " pod="openstack/dnsmasq-dns-764c5664d7-9fjng" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.229310 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bf3c7b08-f505-4d22-a0fa-7225d1a77808-dns-swift-storage-0\") pod \"dnsmasq-dns-764c5664d7-9fjng\" (UID: \"bf3c7b08-f505-4d22-a0fa-7225d1a77808\") " pod="openstack/dnsmasq-dns-764c5664d7-9fjng" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.229332 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zt5rg\" (UniqueName: \"kubernetes.io/projected/bf3c7b08-f505-4d22-a0fa-7225d1a77808-kube-api-access-zt5rg\") pod \"dnsmasq-dns-764c5664d7-9fjng\" (UID: \"bf3c7b08-f505-4d22-a0fa-7225d1a77808\") " 
pod="openstack/dnsmasq-dns-764c5664d7-9fjng" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.229386 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bf3c7b08-f505-4d22-a0fa-7225d1a77808-ovsdbserver-sb\") pod \"dnsmasq-dns-764c5664d7-9fjng\" (UID: \"bf3c7b08-f505-4d22-a0fa-7225d1a77808\") " pod="openstack/dnsmasq-dns-764c5664d7-9fjng" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.229456 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bf3c7b08-f505-4d22-a0fa-7225d1a77808-dns-svc\") pod \"dnsmasq-dns-764c5664d7-9fjng\" (UID: \"bf3c7b08-f505-4d22-a0fa-7225d1a77808\") " pod="openstack/dnsmasq-dns-764c5664d7-9fjng" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.230365 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bf3c7b08-f505-4d22-a0fa-7225d1a77808-ovsdbserver-nb\") pod \"dnsmasq-dns-764c5664d7-9fjng\" (UID: \"bf3c7b08-f505-4d22-a0fa-7225d1a77808\") " pod="openstack/dnsmasq-dns-764c5664d7-9fjng" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.230507 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bf3c7b08-f505-4d22-a0fa-7225d1a77808-dns-swift-storage-0\") pod \"dnsmasq-dns-764c5664d7-9fjng\" (UID: \"bf3c7b08-f505-4d22-a0fa-7225d1a77808\") " pod="openstack/dnsmasq-dns-764c5664d7-9fjng" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.230671 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bf3c7b08-f505-4d22-a0fa-7225d1a77808-dns-svc\") pod \"dnsmasq-dns-764c5664d7-9fjng\" (UID: \"bf3c7b08-f505-4d22-a0fa-7225d1a77808\") " pod="openstack/dnsmasq-dns-764c5664d7-9fjng" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.230966 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bf3c7b08-f505-4d22-a0fa-7225d1a77808-ovsdbserver-sb\") pod \"dnsmasq-dns-764c5664d7-9fjng\" (UID: \"bf3c7b08-f505-4d22-a0fa-7225d1a77808\") " pod="openstack/dnsmasq-dns-764c5664d7-9fjng" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.230971 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bf3c7b08-f505-4d22-a0fa-7225d1a77808-config\") pod \"dnsmasq-dns-764c5664d7-9fjng\" (UID: \"bf3c7b08-f505-4d22-a0fa-7225d1a77808\") " pod="openstack/dnsmasq-dns-764c5664d7-9fjng" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.250669 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zt5rg\" (UniqueName: \"kubernetes.io/projected/bf3c7b08-f505-4d22-a0fa-7225d1a77808-kube-api-access-zt5rg\") pod \"dnsmasq-dns-764c5664d7-9fjng\" (UID: \"bf3c7b08-f505-4d22-a0fa-7225d1a77808\") " pod="openstack/dnsmasq-dns-764c5664d7-9fjng" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.331178 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97-config\") pod \"dnsmasq-dns-74f6bcbc87-7f9pb\" (UID: \"ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97\") " pod="openstack/dnsmasq-dns-74f6bcbc87-7f9pb" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.331268 4792 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97-ovsdbserver-sb\") pod \"dnsmasq-dns-74f6bcbc87-7f9pb\" (UID: \"ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97\") " pod="openstack/dnsmasq-dns-74f6bcbc87-7f9pb" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.331306 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2btlt\" (UniqueName: \"kubernetes.io/projected/ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97-kube-api-access-2btlt\") pod \"dnsmasq-dns-74f6bcbc87-7f9pb\" (UID: \"ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97\") " pod="openstack/dnsmasq-dns-74f6bcbc87-7f9pb" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.331389 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97-dns-swift-storage-0\") pod \"dnsmasq-dns-74f6bcbc87-7f9pb\" (UID: \"ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97\") " pod="openstack/dnsmasq-dns-74f6bcbc87-7f9pb" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.331421 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97-ovsdbserver-nb\") pod \"dnsmasq-dns-74f6bcbc87-7f9pb\" (UID: \"ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97\") " pod="openstack/dnsmasq-dns-74f6bcbc87-7f9pb" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.331439 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97-dns-svc\") pod \"dnsmasq-dns-74f6bcbc87-7f9pb\" (UID: \"ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97\") " pod="openstack/dnsmasq-dns-74f6bcbc87-7f9pb" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.433592 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97-dns-swift-storage-0\") pod \"dnsmasq-dns-74f6bcbc87-7f9pb\" (UID: \"ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97\") " pod="openstack/dnsmasq-dns-74f6bcbc87-7f9pb" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.433676 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97-ovsdbserver-nb\") pod \"dnsmasq-dns-74f6bcbc87-7f9pb\" (UID: \"ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97\") " pod="openstack/dnsmasq-dns-74f6bcbc87-7f9pb" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.433701 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97-dns-svc\") pod \"dnsmasq-dns-74f6bcbc87-7f9pb\" (UID: \"ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97\") " pod="openstack/dnsmasq-dns-74f6bcbc87-7f9pb" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.433795 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97-config\") pod \"dnsmasq-dns-74f6bcbc87-7f9pb\" (UID: \"ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97\") " pod="openstack/dnsmasq-dns-74f6bcbc87-7f9pb" Dec 02 18:57:48 crc 
kubenswrapper[4792]: I1202 18:57:48.433849 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97-ovsdbserver-sb\") pod \"dnsmasq-dns-74f6bcbc87-7f9pb\" (UID: \"ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97\") " pod="openstack/dnsmasq-dns-74f6bcbc87-7f9pb" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.433871 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2btlt\" (UniqueName: \"kubernetes.io/projected/ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97-kube-api-access-2btlt\") pod \"dnsmasq-dns-74f6bcbc87-7f9pb\" (UID: \"ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97\") " pod="openstack/dnsmasq-dns-74f6bcbc87-7f9pb" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.435426 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97-ovsdbserver-sb\") pod \"dnsmasq-dns-74f6bcbc87-7f9pb\" (UID: \"ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97\") " pod="openstack/dnsmasq-dns-74f6bcbc87-7f9pb" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.435447 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97-dns-svc\") pod \"dnsmasq-dns-74f6bcbc87-7f9pb\" (UID: \"ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97\") " pod="openstack/dnsmasq-dns-74f6bcbc87-7f9pb" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.435427 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97-config\") pod \"dnsmasq-dns-74f6bcbc87-7f9pb\" (UID: \"ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97\") " pod="openstack/dnsmasq-dns-74f6bcbc87-7f9pb" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.435636 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97-ovsdbserver-nb\") pod \"dnsmasq-dns-74f6bcbc87-7f9pb\" (UID: \"ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97\") " pod="openstack/dnsmasq-dns-74f6bcbc87-7f9pb" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.436428 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97-dns-swift-storage-0\") pod \"dnsmasq-dns-74f6bcbc87-7f9pb\" (UID: \"ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97\") " pod="openstack/dnsmasq-dns-74f6bcbc87-7f9pb" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.470506 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2btlt\" (UniqueName: \"kubernetes.io/projected/ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97-kube-api-access-2btlt\") pod \"dnsmasq-dns-74f6bcbc87-7f9pb\" (UID: \"ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97\") " pod="openstack/dnsmasq-dns-74f6bcbc87-7f9pb" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.494201 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74f6bcbc87-7f9pb" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.605629 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-764c5664d7-9fjng" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.626670 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-764c5664d7-9fjng" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.738721 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bf3c7b08-f505-4d22-a0fa-7225d1a77808-dns-swift-storage-0\") pod \"bf3c7b08-f505-4d22-a0fa-7225d1a77808\" (UID: \"bf3c7b08-f505-4d22-a0fa-7225d1a77808\") " Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.739162 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bf3c7b08-f505-4d22-a0fa-7225d1a77808-ovsdbserver-sb\") pod \"bf3c7b08-f505-4d22-a0fa-7225d1a77808\" (UID: \"bf3c7b08-f505-4d22-a0fa-7225d1a77808\") " Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.739192 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bf3c7b08-f505-4d22-a0fa-7225d1a77808-dns-svc\") pod \"bf3c7b08-f505-4d22-a0fa-7225d1a77808\" (UID: \"bf3c7b08-f505-4d22-a0fa-7225d1a77808\") " Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.739286 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zt5rg\" (UniqueName: \"kubernetes.io/projected/bf3c7b08-f505-4d22-a0fa-7225d1a77808-kube-api-access-zt5rg\") pod \"bf3c7b08-f505-4d22-a0fa-7225d1a77808\" (UID: \"bf3c7b08-f505-4d22-a0fa-7225d1a77808\") " Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.739348 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bf3c7b08-f505-4d22-a0fa-7225d1a77808-ovsdbserver-nb\") pod \"bf3c7b08-f505-4d22-a0fa-7225d1a77808\" (UID: \"bf3c7b08-f505-4d22-a0fa-7225d1a77808\") " Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.739379 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bf3c7b08-f505-4d22-a0fa-7225d1a77808-config\") pod \"bf3c7b08-f505-4d22-a0fa-7225d1a77808\" (UID: \"bf3c7b08-f505-4d22-a0fa-7225d1a77808\") " Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.739410 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf3c7b08-f505-4d22-a0fa-7225d1a77808-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "bf3c7b08-f505-4d22-a0fa-7225d1a77808" (UID: "bf3c7b08-f505-4d22-a0fa-7225d1a77808"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.739587 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf3c7b08-f505-4d22-a0fa-7225d1a77808-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "bf3c7b08-f505-4d22-a0fa-7225d1a77808" (UID: "bf3c7b08-f505-4d22-a0fa-7225d1a77808"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.739938 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf3c7b08-f505-4d22-a0fa-7225d1a77808-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "bf3c7b08-f505-4d22-a0fa-7225d1a77808" (UID: "bf3c7b08-f505-4d22-a0fa-7225d1a77808"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.739933 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf3c7b08-f505-4d22-a0fa-7225d1a77808-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "bf3c7b08-f505-4d22-a0fa-7225d1a77808" (UID: "bf3c7b08-f505-4d22-a0fa-7225d1a77808"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.740178 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bf3c7b08-f505-4d22-a0fa-7225d1a77808-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.740190 4792 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bf3c7b08-f505-4d22-a0fa-7225d1a77808-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.740199 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bf3c7b08-f505-4d22-a0fa-7225d1a77808-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.740208 4792 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bf3c7b08-f505-4d22-a0fa-7225d1a77808-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.740489 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf3c7b08-f505-4d22-a0fa-7225d1a77808-config" (OuterVolumeSpecName: "config") pod "bf3c7b08-f505-4d22-a0fa-7225d1a77808" (UID: "bf3c7b08-f505-4d22-a0fa-7225d1a77808"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.745252 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf3c7b08-f505-4d22-a0fa-7225d1a77808-kube-api-access-zt5rg" (OuterVolumeSpecName: "kube-api-access-zt5rg") pod "bf3c7b08-f505-4d22-a0fa-7225d1a77808" (UID: "bf3c7b08-f505-4d22-a0fa-7225d1a77808"). InnerVolumeSpecName "kube-api-access-zt5rg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.841447 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zt5rg\" (UniqueName: \"kubernetes.io/projected/bf3c7b08-f505-4d22-a0fa-7225d1a77808-kube-api-access-zt5rg\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:48 crc kubenswrapper[4792]: I1202 18:57:48.841495 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bf3c7b08-f505-4d22-a0fa-7225d1a77808-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:49 crc kubenswrapper[4792]: I1202 18:57:49.028553 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74f6bcbc87-7f9pb"] Dec 02 18:57:49 crc kubenswrapper[4792]: W1202 18:57:49.047809 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podae2a5fe7_e4fc_4e75_ada1_7dad7db02f97.slice/crio-3a3398014a03a6fb20ff52f4befdf26c71bc6fe24dd7dc13e970ec31c022fb53 WatchSource:0}: Error finding container 3a3398014a03a6fb20ff52f4befdf26c71bc6fe24dd7dc13e970ec31c022fb53: Status 404 returned error can't find the container with id 3a3398014a03a6fb20ff52f4befdf26c71bc6fe24dd7dc13e970ec31c022fb53 Dec 02 18:57:49 crc kubenswrapper[4792]: I1202 18:57:49.618884 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"5d36d473-c89a-496a-ab27-d65535edb0ec","Type":"ContainerStarted","Data":"70d0362bdaa143503a8c449cbe04010c685e1444061bdad99c38b78367b626d8"} Dec 02 18:57:49 crc kubenswrapper[4792]: I1202 18:57:49.621021 4792 generic.go:334] "Generic (PLEG): container finished" podID="ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97" containerID="093c323122e6af23f926d255de7b34a4b91eb0b3e50ca308ff28a77d07ec2074" exitCode=0 Dec 02 18:57:49 crc kubenswrapper[4792]: I1202 18:57:49.621097 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f6bcbc87-7f9pb" event={"ID":"ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97","Type":"ContainerDied","Data":"093c323122e6af23f926d255de7b34a4b91eb0b3e50ca308ff28a77d07ec2074"} Dec 02 18:57:49 crc kubenswrapper[4792]: I1202 18:57:49.621156 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-764c5664d7-9fjng" Dec 02 18:57:49 crc kubenswrapper[4792]: I1202 18:57:49.621167 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f6bcbc87-7f9pb" event={"ID":"ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97","Type":"ContainerStarted","Data":"3a3398014a03a6fb20ff52f4befdf26c71bc6fe24dd7dc13e970ec31c022fb53"} Dec 02 18:57:49 crc kubenswrapper[4792]: I1202 18:57:49.793888 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-9fjng"] Dec 02 18:57:49 crc kubenswrapper[4792]: I1202 18:57:49.802819 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-9fjng"] Dec 02 18:57:50 crc kubenswrapper[4792]: I1202 18:57:50.636196 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"5d36d473-c89a-496a-ab27-d65535edb0ec","Type":"ContainerStarted","Data":"72e46a40f37b5117ec8c9f9385cb51c9b54dd7bbc9e57b3756b6ad65a22aac7a"} Dec 02 18:57:50 crc kubenswrapper[4792]: I1202 18:57:50.639394 4792 generic.go:334] "Generic (PLEG): container finished" podID="a662d52b-8774-40a9-b965-ca41d5f1c6c4" containerID="2b93d2eca1ab24fabcc6fd23b34db4f46d3f9d28d326481b9014b1ca3d52056c" exitCode=0 Dec 02 18:57:50 crc kubenswrapper[4792]: I1202 18:57:50.639483 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-jj9jh" event={"ID":"a662d52b-8774-40a9-b965-ca41d5f1c6c4","Type":"ContainerDied","Data":"2b93d2eca1ab24fabcc6fd23b34db4f46d3f9d28d326481b9014b1ca3d52056c"} Dec 02 18:57:50 crc kubenswrapper[4792]: I1202 18:57:50.642492 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f6bcbc87-7f9pb" event={"ID":"ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97","Type":"ContainerStarted","Data":"f3346e6b67588344f2ab3cab3754cc0355ebb8c01b1e0bcf6628832a9b6905b4"} Dec 02 18:57:50 crc kubenswrapper[4792]: I1202 18:57:50.643400 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-74f6bcbc87-7f9pb" Dec 02 18:57:50 crc kubenswrapper[4792]: I1202 18:57:50.697655 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=24.69762256 podStartE2EDuration="24.69762256s" podCreationTimestamp="2025-12-02 18:57:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:57:50.684829148 +0000 UTC m=+1301.457721476" watchObservedRunningTime="2025-12-02 18:57:50.69762256 +0000 UTC m=+1301.470514928" Dec 02 18:57:50 crc kubenswrapper[4792]: I1202 18:57:50.723319 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-74f6bcbc87-7f9pb" podStartSLOduration=2.7233036349999997 podStartE2EDuration="2.723303635s" podCreationTimestamp="2025-12-02 18:57:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:57:50.722398822 +0000 UTC m=+1301.495291200" watchObservedRunningTime="2025-12-02 18:57:50.723303635 +0000 UTC m=+1301.496195963" Dec 02 18:57:51 crc kubenswrapper[4792]: E1202 18:57:51.158364 4792 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6f2d96d0_f671_46cd_8e95_162a0773470d.slice\": RecentStats: unable to find data in 
memory cache]" Dec 02 18:57:51 crc kubenswrapper[4792]: I1202 18:57:51.559554 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf3c7b08-f505-4d22-a0fa-7225d1a77808" path="/var/lib/kubelet/pods/bf3c7b08-f505-4d22-a0fa-7225d1a77808/volumes" Dec 02 18:57:51 crc kubenswrapper[4792]: I1202 18:57:51.840602 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Dec 02 18:57:52 crc kubenswrapper[4792]: I1202 18:57:52.021935 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-jj9jh" Dec 02 18:57:52 crc kubenswrapper[4792]: I1202 18:57:52.146370 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a662d52b-8774-40a9-b965-ca41d5f1c6c4-combined-ca-bundle\") pod \"a662d52b-8774-40a9-b965-ca41d5f1c6c4\" (UID: \"a662d52b-8774-40a9-b965-ca41d5f1c6c4\") " Dec 02 18:57:52 crc kubenswrapper[4792]: I1202 18:57:52.146451 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a662d52b-8774-40a9-b965-ca41d5f1c6c4-config-data\") pod \"a662d52b-8774-40a9-b965-ca41d5f1c6c4\" (UID: \"a662d52b-8774-40a9-b965-ca41d5f1c6c4\") " Dec 02 18:57:52 crc kubenswrapper[4792]: I1202 18:57:52.146675 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r9h8q\" (UniqueName: \"kubernetes.io/projected/a662d52b-8774-40a9-b965-ca41d5f1c6c4-kube-api-access-r9h8q\") pod \"a662d52b-8774-40a9-b965-ca41d5f1c6c4\" (UID: \"a662d52b-8774-40a9-b965-ca41d5f1c6c4\") " Dec 02 18:57:52 crc kubenswrapper[4792]: I1202 18:57:52.151984 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a662d52b-8774-40a9-b965-ca41d5f1c6c4-kube-api-access-r9h8q" (OuterVolumeSpecName: "kube-api-access-r9h8q") pod "a662d52b-8774-40a9-b965-ca41d5f1c6c4" (UID: "a662d52b-8774-40a9-b965-ca41d5f1c6c4"). InnerVolumeSpecName "kube-api-access-r9h8q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:57:52 crc kubenswrapper[4792]: I1202 18:57:52.184624 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a662d52b-8774-40a9-b965-ca41d5f1c6c4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a662d52b-8774-40a9-b965-ca41d5f1c6c4" (UID: "a662d52b-8774-40a9-b965-ca41d5f1c6c4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:57:52 crc kubenswrapper[4792]: I1202 18:57:52.198165 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a662d52b-8774-40a9-b965-ca41d5f1c6c4-config-data" (OuterVolumeSpecName: "config-data") pod "a662d52b-8774-40a9-b965-ca41d5f1c6c4" (UID: "a662d52b-8774-40a9-b965-ca41d5f1c6c4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:57:52 crc kubenswrapper[4792]: I1202 18:57:52.249342 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r9h8q\" (UniqueName: \"kubernetes.io/projected/a662d52b-8774-40a9-b965-ca41d5f1c6c4-kube-api-access-r9h8q\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:52 crc kubenswrapper[4792]: I1202 18:57:52.249379 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a662d52b-8774-40a9-b965-ca41d5f1c6c4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:52 crc kubenswrapper[4792]: I1202 18:57:52.249393 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a662d52b-8774-40a9-b965-ca41d5f1c6c4-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:52 crc kubenswrapper[4792]: I1202 18:57:52.674356 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-jj9jh" event={"ID":"a662d52b-8774-40a9-b965-ca41d5f1c6c4","Type":"ContainerDied","Data":"6349ae0077f008786051e935f2ccf63875e2164e5bb254f0eb0c137c4ab1287d"} Dec 02 18:57:52 crc kubenswrapper[4792]: I1202 18:57:52.674749 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6349ae0077f008786051e935f2ccf63875e2164e5bb254f0eb0c137c4ab1287d" Dec 02 18:57:52 crc kubenswrapper[4792]: I1202 18:57:52.674631 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-jj9jh" Dec 02 18:57:52 crc kubenswrapper[4792]: I1202 18:57:52.915917 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74f6bcbc87-7f9pb"] Dec 02 18:57:52 crc kubenswrapper[4792]: I1202 18:57:52.929567 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-hxh97"] Dec 02 18:57:52 crc kubenswrapper[4792]: E1202 18:57:52.930010 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a662d52b-8774-40a9-b965-ca41d5f1c6c4" containerName="keystone-db-sync" Dec 02 18:57:52 crc kubenswrapper[4792]: I1202 18:57:52.930029 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="a662d52b-8774-40a9-b965-ca41d5f1c6c4" containerName="keystone-db-sync" Dec 02 18:57:52 crc kubenswrapper[4792]: I1202 18:57:52.930239 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="a662d52b-8774-40a9-b965-ca41d5f1c6c4" containerName="keystone-db-sync" Dec 02 18:57:52 crc kubenswrapper[4792]: I1202 18:57:52.930956 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-hxh97" Dec 02 18:57:52 crc kubenswrapper[4792]: I1202 18:57:52.938192 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 02 18:57:52 crc kubenswrapper[4792]: I1202 18:57:52.939330 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-phdxk" Dec 02 18:57:52 crc kubenswrapper[4792]: I1202 18:57:52.939581 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 02 18:57:52 crc kubenswrapper[4792]: I1202 18:57:52.939724 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 02 18:57:52 crc kubenswrapper[4792]: I1202 18:57:52.946612 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-847c4cc679-2rdm4"] Dec 02 18:57:52 crc kubenswrapper[4792]: I1202 18:57:52.948139 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 02 18:57:52 crc kubenswrapper[4792]: I1202 18:57:52.948360 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-847c4cc679-2rdm4" Dec 02 18:57:52 crc kubenswrapper[4792]: I1202 18:57:52.958946 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-hxh97"] Dec 02 18:57:52 crc kubenswrapper[4792]: I1202 18:57:52.970624 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-847c4cc679-2rdm4"] Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.064020 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5d07f699-63a8-4c42-a0d9-3481700d8a20-dns-svc\") pod \"dnsmasq-dns-847c4cc679-2rdm4\" (UID: \"5d07f699-63a8-4c42-a0d9-3481700d8a20\") " pod="openstack/dnsmasq-dns-847c4cc679-2rdm4" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.064133 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h2lqx\" (UniqueName: \"kubernetes.io/projected/0eb1ec35-0b74-4b18-9c17-63f1f13dccaa-kube-api-access-h2lqx\") pod \"keystone-bootstrap-hxh97\" (UID: \"0eb1ec35-0b74-4b18-9c17-63f1f13dccaa\") " pod="openstack/keystone-bootstrap-hxh97" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.064160 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0eb1ec35-0b74-4b18-9c17-63f1f13dccaa-combined-ca-bundle\") pod \"keystone-bootstrap-hxh97\" (UID: \"0eb1ec35-0b74-4b18-9c17-63f1f13dccaa\") " pod="openstack/keystone-bootstrap-hxh97" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.064191 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d07f699-63a8-4c42-a0d9-3481700d8a20-config\") pod \"dnsmasq-dns-847c4cc679-2rdm4\" (UID: \"5d07f699-63a8-4c42-a0d9-3481700d8a20\") " pod="openstack/dnsmasq-dns-847c4cc679-2rdm4" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.064263 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0eb1ec35-0b74-4b18-9c17-63f1f13dccaa-fernet-keys\") pod \"keystone-bootstrap-hxh97\" (UID: \"0eb1ec35-0b74-4b18-9c17-63f1f13dccaa\") " pod="openstack/keystone-bootstrap-hxh97" Dec 02 
18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.064283 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0eb1ec35-0b74-4b18-9c17-63f1f13dccaa-config-data\") pod \"keystone-bootstrap-hxh97\" (UID: \"0eb1ec35-0b74-4b18-9c17-63f1f13dccaa\") " pod="openstack/keystone-bootstrap-hxh97" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.064298 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5d07f699-63a8-4c42-a0d9-3481700d8a20-ovsdbserver-nb\") pod \"dnsmasq-dns-847c4cc679-2rdm4\" (UID: \"5d07f699-63a8-4c42-a0d9-3481700d8a20\") " pod="openstack/dnsmasq-dns-847c4cc679-2rdm4" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.064324 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lckws\" (UniqueName: \"kubernetes.io/projected/5d07f699-63a8-4c42-a0d9-3481700d8a20-kube-api-access-lckws\") pod \"dnsmasq-dns-847c4cc679-2rdm4\" (UID: \"5d07f699-63a8-4c42-a0d9-3481700d8a20\") " pod="openstack/dnsmasq-dns-847c4cc679-2rdm4" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.064430 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0eb1ec35-0b74-4b18-9c17-63f1f13dccaa-credential-keys\") pod \"keystone-bootstrap-hxh97\" (UID: \"0eb1ec35-0b74-4b18-9c17-63f1f13dccaa\") " pod="openstack/keystone-bootstrap-hxh97" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.064614 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5d07f699-63a8-4c42-a0d9-3481700d8a20-dns-swift-storage-0\") pod \"dnsmasq-dns-847c4cc679-2rdm4\" (UID: \"5d07f699-63a8-4c42-a0d9-3481700d8a20\") " pod="openstack/dnsmasq-dns-847c4cc679-2rdm4" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.064648 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0eb1ec35-0b74-4b18-9c17-63f1f13dccaa-scripts\") pod \"keystone-bootstrap-hxh97\" (UID: \"0eb1ec35-0b74-4b18-9c17-63f1f13dccaa\") " pod="openstack/keystone-bootstrap-hxh97" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.064729 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5d07f699-63a8-4c42-a0d9-3481700d8a20-ovsdbserver-sb\") pod \"dnsmasq-dns-847c4cc679-2rdm4\" (UID: \"5d07f699-63a8-4c42-a0d9-3481700d8a20\") " pod="openstack/dnsmasq-dns-847c4cc679-2rdm4" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.129936 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-t5xgj"] Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.131907 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-t5xgj" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.135619 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.135907 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-b8gkz" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.136019 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-zmnrf"] Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.137327 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-zmnrf" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.137849 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.145113 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.145358 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.145637 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-t5xgj"] Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.146193 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-tsgx2" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.161547 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-zmnrf"] Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.166297 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0eb1ec35-0b74-4b18-9c17-63f1f13dccaa-fernet-keys\") pod \"keystone-bootstrap-hxh97\" (UID: \"0eb1ec35-0b74-4b18-9c17-63f1f13dccaa\") " pod="openstack/keystone-bootstrap-hxh97" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.166333 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0eb1ec35-0b74-4b18-9c17-63f1f13dccaa-config-data\") pod \"keystone-bootstrap-hxh97\" (UID: \"0eb1ec35-0b74-4b18-9c17-63f1f13dccaa\") " pod="openstack/keystone-bootstrap-hxh97" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.166353 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5d07f699-63a8-4c42-a0d9-3481700d8a20-ovsdbserver-nb\") pod \"dnsmasq-dns-847c4cc679-2rdm4\" (UID: \"5d07f699-63a8-4c42-a0d9-3481700d8a20\") " pod="openstack/dnsmasq-dns-847c4cc679-2rdm4" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.166382 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lckws\" (UniqueName: \"kubernetes.io/projected/5d07f699-63a8-4c42-a0d9-3481700d8a20-kube-api-access-lckws\") pod \"dnsmasq-dns-847c4cc679-2rdm4\" (UID: \"5d07f699-63a8-4c42-a0d9-3481700d8a20\") " pod="openstack/dnsmasq-dns-847c4cc679-2rdm4" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.166408 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0eb1ec35-0b74-4b18-9c17-63f1f13dccaa-credential-keys\") pod \"keystone-bootstrap-hxh97\" (UID: 
\"0eb1ec35-0b74-4b18-9c17-63f1f13dccaa\") " pod="openstack/keystone-bootstrap-hxh97" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.166445 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5d07f699-63a8-4c42-a0d9-3481700d8a20-dns-swift-storage-0\") pod \"dnsmasq-dns-847c4cc679-2rdm4\" (UID: \"5d07f699-63a8-4c42-a0d9-3481700d8a20\") " pod="openstack/dnsmasq-dns-847c4cc679-2rdm4" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.166462 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0eb1ec35-0b74-4b18-9c17-63f1f13dccaa-scripts\") pod \"keystone-bootstrap-hxh97\" (UID: \"0eb1ec35-0b74-4b18-9c17-63f1f13dccaa\") " pod="openstack/keystone-bootstrap-hxh97" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.166560 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5d07f699-63a8-4c42-a0d9-3481700d8a20-ovsdbserver-sb\") pod \"dnsmasq-dns-847c4cc679-2rdm4\" (UID: \"5d07f699-63a8-4c42-a0d9-3481700d8a20\") " pod="openstack/dnsmasq-dns-847c4cc679-2rdm4" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.166590 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5d07f699-63a8-4c42-a0d9-3481700d8a20-dns-svc\") pod \"dnsmasq-dns-847c4cc679-2rdm4\" (UID: \"5d07f699-63a8-4c42-a0d9-3481700d8a20\") " pod="openstack/dnsmasq-dns-847c4cc679-2rdm4" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.166618 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h2lqx\" (UniqueName: \"kubernetes.io/projected/0eb1ec35-0b74-4b18-9c17-63f1f13dccaa-kube-api-access-h2lqx\") pod \"keystone-bootstrap-hxh97\" (UID: \"0eb1ec35-0b74-4b18-9c17-63f1f13dccaa\") " pod="openstack/keystone-bootstrap-hxh97" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.166639 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0eb1ec35-0b74-4b18-9c17-63f1f13dccaa-combined-ca-bundle\") pod \"keystone-bootstrap-hxh97\" (UID: \"0eb1ec35-0b74-4b18-9c17-63f1f13dccaa\") " pod="openstack/keystone-bootstrap-hxh97" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.166662 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d07f699-63a8-4c42-a0d9-3481700d8a20-config\") pod \"dnsmasq-dns-847c4cc679-2rdm4\" (UID: \"5d07f699-63a8-4c42-a0d9-3481700d8a20\") " pod="openstack/dnsmasq-dns-847c4cc679-2rdm4" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.167713 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d07f699-63a8-4c42-a0d9-3481700d8a20-config\") pod \"dnsmasq-dns-847c4cc679-2rdm4\" (UID: \"5d07f699-63a8-4c42-a0d9-3481700d8a20\") " pod="openstack/dnsmasq-dns-847c4cc679-2rdm4" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.168242 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5d07f699-63a8-4c42-a0d9-3481700d8a20-dns-swift-storage-0\") pod \"dnsmasq-dns-847c4cc679-2rdm4\" (UID: \"5d07f699-63a8-4c42-a0d9-3481700d8a20\") " pod="openstack/dnsmasq-dns-847c4cc679-2rdm4" Dec 02 18:57:53 crc 
kubenswrapper[4792]: I1202 18:57:53.168884 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5d07f699-63a8-4c42-a0d9-3481700d8a20-ovsdbserver-nb\") pod \"dnsmasq-dns-847c4cc679-2rdm4\" (UID: \"5d07f699-63a8-4c42-a0d9-3481700d8a20\") " pod="openstack/dnsmasq-dns-847c4cc679-2rdm4" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.169660 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5d07f699-63a8-4c42-a0d9-3481700d8a20-dns-svc\") pod \"dnsmasq-dns-847c4cc679-2rdm4\" (UID: \"5d07f699-63a8-4c42-a0d9-3481700d8a20\") " pod="openstack/dnsmasq-dns-847c4cc679-2rdm4" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.170199 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5d07f699-63a8-4c42-a0d9-3481700d8a20-ovsdbserver-sb\") pod \"dnsmasq-dns-847c4cc679-2rdm4\" (UID: \"5d07f699-63a8-4c42-a0d9-3481700d8a20\") " pod="openstack/dnsmasq-dns-847c4cc679-2rdm4" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.175649 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0eb1ec35-0b74-4b18-9c17-63f1f13dccaa-scripts\") pod \"keystone-bootstrap-hxh97\" (UID: \"0eb1ec35-0b74-4b18-9c17-63f1f13dccaa\") " pod="openstack/keystone-bootstrap-hxh97" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.175884 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0eb1ec35-0b74-4b18-9c17-63f1f13dccaa-combined-ca-bundle\") pod \"keystone-bootstrap-hxh97\" (UID: \"0eb1ec35-0b74-4b18-9c17-63f1f13dccaa\") " pod="openstack/keystone-bootstrap-hxh97" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.177113 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0eb1ec35-0b74-4b18-9c17-63f1f13dccaa-config-data\") pod \"keystone-bootstrap-hxh97\" (UID: \"0eb1ec35-0b74-4b18-9c17-63f1f13dccaa\") " pod="openstack/keystone-bootstrap-hxh97" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.178176 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0eb1ec35-0b74-4b18-9c17-63f1f13dccaa-fernet-keys\") pod \"keystone-bootstrap-hxh97\" (UID: \"0eb1ec35-0b74-4b18-9c17-63f1f13dccaa\") " pod="openstack/keystone-bootstrap-hxh97" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.186093 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0eb1ec35-0b74-4b18-9c17-63f1f13dccaa-credential-keys\") pod \"keystone-bootstrap-hxh97\" (UID: \"0eb1ec35-0b74-4b18-9c17-63f1f13dccaa\") " pod="openstack/keystone-bootstrap-hxh97" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.198638 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h2lqx\" (UniqueName: \"kubernetes.io/projected/0eb1ec35-0b74-4b18-9c17-63f1f13dccaa-kube-api-access-h2lqx\") pod \"keystone-bootstrap-hxh97\" (UID: \"0eb1ec35-0b74-4b18-9c17-63f1f13dccaa\") " pod="openstack/keystone-bootstrap-hxh97" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.206203 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lckws\" (UniqueName: 
\"kubernetes.io/projected/5d07f699-63a8-4c42-a0d9-3481700d8a20-kube-api-access-lckws\") pod \"dnsmasq-dns-847c4cc679-2rdm4\" (UID: \"5d07f699-63a8-4c42-a0d9-3481700d8a20\") " pod="openstack/dnsmasq-dns-847c4cc679-2rdm4" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.240877 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-db-sync-wh2hr"] Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.241983 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-sync-wh2hr" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.244986 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-scripts" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.245149 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-cloudkitty-dockercfg-qndk8" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.245281 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-config-data" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.245338 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-client-internal" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.248687 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-hxh97" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.272946 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-847c4cc679-2rdm4" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.274221 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wwqvz\" (UniqueName: \"kubernetes.io/projected/50f2a0fa-fcf2-4f6c-be51-b78ae811fce5-kube-api-access-wwqvz\") pod \"cinder-db-sync-t5xgj\" (UID: \"50f2a0fa-fcf2-4f6c-be51-b78ae811fce5\") " pod="openstack/cinder-db-sync-t5xgj" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.274316 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/50f2a0fa-fcf2-4f6c-be51-b78ae811fce5-db-sync-config-data\") pod \"cinder-db-sync-t5xgj\" (UID: \"50f2a0fa-fcf2-4f6c-be51-b78ae811fce5\") " pod="openstack/cinder-db-sync-t5xgj" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.274391 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hks82\" (UniqueName: \"kubernetes.io/projected/13ec79b9-9006-473e-a8c9-e0cc9069d983-kube-api-access-hks82\") pod \"neutron-db-sync-zmnrf\" (UID: \"13ec79b9-9006-473e-a8c9-e0cc9069d983\") " pod="openstack/neutron-db-sync-zmnrf" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.274473 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/50f2a0fa-fcf2-4f6c-be51-b78ae811fce5-etc-machine-id\") pod \"cinder-db-sync-t5xgj\" (UID: \"50f2a0fa-fcf2-4f6c-be51-b78ae811fce5\") " pod="openstack/cinder-db-sync-t5xgj" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.274597 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50f2a0fa-fcf2-4f6c-be51-b78ae811fce5-combined-ca-bundle\") pod \"cinder-db-sync-t5xgj\" (UID: 
\"50f2a0fa-fcf2-4f6c-be51-b78ae811fce5\") " pod="openstack/cinder-db-sync-t5xgj" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.274706 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/13ec79b9-9006-473e-a8c9-e0cc9069d983-config\") pod \"neutron-db-sync-zmnrf\" (UID: \"13ec79b9-9006-473e-a8c9-e0cc9069d983\") " pod="openstack/neutron-db-sync-zmnrf" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.274830 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50f2a0fa-fcf2-4f6c-be51-b78ae811fce5-scripts\") pod \"cinder-db-sync-t5xgj\" (UID: \"50f2a0fa-fcf2-4f6c-be51-b78ae811fce5\") " pod="openstack/cinder-db-sync-t5xgj" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.274889 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13ec79b9-9006-473e-a8c9-e0cc9069d983-combined-ca-bundle\") pod \"neutron-db-sync-zmnrf\" (UID: \"13ec79b9-9006-473e-a8c9-e0cc9069d983\") " pod="openstack/neutron-db-sync-zmnrf" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.274946 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50f2a0fa-fcf2-4f6c-be51-b78ae811fce5-config-data\") pod \"cinder-db-sync-t5xgj\" (UID: \"50f2a0fa-fcf2-4f6c-be51-b78ae811fce5\") " pod="openstack/cinder-db-sync-t5xgj" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.278902 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-sync-wh2hr"] Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.355585 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.357730 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.371174 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-dc5j7"] Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.377970 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/50f2a0fa-fcf2-4f6c-be51-b78ae811fce5-etc-machine-id\") pod \"cinder-db-sync-t5xgj\" (UID: \"50f2a0fa-fcf2-4f6c-be51-b78ae811fce5\") " pod="openstack/cinder-db-sync-t5xgj" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.378037 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-km57l\" (UniqueName: \"kubernetes.io/projected/3df2077b-8a01-47ae-ad22-abfc02071c24-kube-api-access-km57l\") pod \"cloudkitty-db-sync-wh2hr\" (UID: \"3df2077b-8a01-47ae-ad22-abfc02071c24\") " pod="openstack/cloudkitty-db-sync-wh2hr" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.378063 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3df2077b-8a01-47ae-ad22-abfc02071c24-combined-ca-bundle\") pod \"cloudkitty-db-sync-wh2hr\" (UID: \"3df2077b-8a01-47ae-ad22-abfc02071c24\") " pod="openstack/cloudkitty-db-sync-wh2hr" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.378092 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50f2a0fa-fcf2-4f6c-be51-b78ae811fce5-combined-ca-bundle\") pod \"cinder-db-sync-t5xgj\" (UID: \"50f2a0fa-fcf2-4f6c-be51-b78ae811fce5\") " pod="openstack/cinder-db-sync-t5xgj" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.378114 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/13ec79b9-9006-473e-a8c9-e0cc9069d983-config\") pod \"neutron-db-sync-zmnrf\" (UID: \"13ec79b9-9006-473e-a8c9-e0cc9069d983\") " pod="openstack/neutron-db-sync-zmnrf" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.378178 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3df2077b-8a01-47ae-ad22-abfc02071c24-scripts\") pod \"cloudkitty-db-sync-wh2hr\" (UID: \"3df2077b-8a01-47ae-ad22-abfc02071c24\") " pod="openstack/cloudkitty-db-sync-wh2hr" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.378213 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13ec79b9-9006-473e-a8c9-e0cc9069d983-combined-ca-bundle\") pod \"neutron-db-sync-zmnrf\" (UID: \"13ec79b9-9006-473e-a8c9-e0cc9069d983\") " pod="openstack/neutron-db-sync-zmnrf" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.378229 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50f2a0fa-fcf2-4f6c-be51-b78ae811fce5-scripts\") pod \"cinder-db-sync-t5xgj\" (UID: \"50f2a0fa-fcf2-4f6c-be51-b78ae811fce5\") " pod="openstack/cinder-db-sync-t5xgj" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.378247 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50f2a0fa-fcf2-4f6c-be51-b78ae811fce5-config-data\") pod \"cinder-db-sync-t5xgj\" (UID: 
\"50f2a0fa-fcf2-4f6c-be51-b78ae811fce5\") " pod="openstack/cinder-db-sync-t5xgj" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.378273 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/3df2077b-8a01-47ae-ad22-abfc02071c24-certs\") pod \"cloudkitty-db-sync-wh2hr\" (UID: \"3df2077b-8a01-47ae-ad22-abfc02071c24\") " pod="openstack/cloudkitty-db-sync-wh2hr" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.378295 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wwqvz\" (UniqueName: \"kubernetes.io/projected/50f2a0fa-fcf2-4f6c-be51-b78ae811fce5-kube-api-access-wwqvz\") pod \"cinder-db-sync-t5xgj\" (UID: \"50f2a0fa-fcf2-4f6c-be51-b78ae811fce5\") " pod="openstack/cinder-db-sync-t5xgj" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.378313 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/50f2a0fa-fcf2-4f6c-be51-b78ae811fce5-db-sync-config-data\") pod \"cinder-db-sync-t5xgj\" (UID: \"50f2a0fa-fcf2-4f6c-be51-b78ae811fce5\") " pod="openstack/cinder-db-sync-t5xgj" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.378345 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3df2077b-8a01-47ae-ad22-abfc02071c24-config-data\") pod \"cloudkitty-db-sync-wh2hr\" (UID: \"3df2077b-8a01-47ae-ad22-abfc02071c24\") " pod="openstack/cloudkitty-db-sync-wh2hr" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.378361 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hks82\" (UniqueName: \"kubernetes.io/projected/13ec79b9-9006-473e-a8c9-e0cc9069d983-kube-api-access-hks82\") pod \"neutron-db-sync-zmnrf\" (UID: \"13ec79b9-9006-473e-a8c9-e0cc9069d983\") " pod="openstack/neutron-db-sync-zmnrf" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.378462 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-dc5j7" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.378931 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/50f2a0fa-fcf2-4f6c-be51-b78ae811fce5-etc-machine-id\") pod \"cinder-db-sync-t5xgj\" (UID: \"50f2a0fa-fcf2-4f6c-be51-b78ae811fce5\") " pod="openstack/cinder-db-sync-t5xgj" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.385192 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13ec79b9-9006-473e-a8c9-e0cc9069d983-combined-ca-bundle\") pod \"neutron-db-sync-zmnrf\" (UID: \"13ec79b9-9006-473e-a8c9-e0cc9069d983\") " pod="openstack/neutron-db-sync-zmnrf" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.385932 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.386136 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.390134 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50f2a0fa-fcf2-4f6c-be51-b78ae811fce5-config-data\") pod \"cinder-db-sync-t5xgj\" (UID: \"50f2a0fa-fcf2-4f6c-be51-b78ae811fce5\") " pod="openstack/cinder-db-sync-t5xgj" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.396439 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/50f2a0fa-fcf2-4f6c-be51-b78ae811fce5-db-sync-config-data\") pod \"cinder-db-sync-t5xgj\" (UID: \"50f2a0fa-fcf2-4f6c-be51-b78ae811fce5\") " pod="openstack/cinder-db-sync-t5xgj" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.398686 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50f2a0fa-fcf2-4f6c-be51-b78ae811fce5-scripts\") pod \"cinder-db-sync-t5xgj\" (UID: \"50f2a0fa-fcf2-4f6c-be51-b78ae811fce5\") " pod="openstack/cinder-db-sync-t5xgj" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.398831 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/13ec79b9-9006-473e-a8c9-e0cc9069d983-config\") pod \"neutron-db-sync-zmnrf\" (UID: \"13ec79b9-9006-473e-a8c9-e0cc9069d983\") " pod="openstack/neutron-db-sync-zmnrf" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.401508 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50f2a0fa-fcf2-4f6c-be51-b78ae811fce5-combined-ca-bundle\") pod \"cinder-db-sync-t5xgj\" (UID: \"50f2a0fa-fcf2-4f6c-be51-b78ae811fce5\") " pod="openstack/cinder-db-sync-t5xgj" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.418393 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-vntnf" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.432847 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.438012 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hks82\" (UniqueName: \"kubernetes.io/projected/13ec79b9-9006-473e-a8c9-e0cc9069d983-kube-api-access-hks82\") pod 
\"neutron-db-sync-zmnrf\" (UID: \"13ec79b9-9006-473e-a8c9-e0cc9069d983\") " pod="openstack/neutron-db-sync-zmnrf" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.447092 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wwqvz\" (UniqueName: \"kubernetes.io/projected/50f2a0fa-fcf2-4f6c-be51-b78ae811fce5-kube-api-access-wwqvz\") pod \"cinder-db-sync-t5xgj\" (UID: \"50f2a0fa-fcf2-4f6c-be51-b78ae811fce5\") " pod="openstack/cinder-db-sync-t5xgj" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.449164 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-dc5j7"] Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.468158 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-t5xgj" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.480815 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-zmnrf" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.481833 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-czpkc\" (UniqueName: \"kubernetes.io/projected/bdde8f20-2325-4180-85b9-72a2b2fefe9f-kube-api-access-czpkc\") pod \"ceilometer-0\" (UID: \"bdde8f20-2325-4180-85b9-72a2b2fefe9f\") " pod="openstack/ceilometer-0" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.481855 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bdde8f20-2325-4180-85b9-72a2b2fefe9f-scripts\") pod \"ceilometer-0\" (UID: \"bdde8f20-2325-4180-85b9-72a2b2fefe9f\") " pod="openstack/ceilometer-0" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.481891 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bdde8f20-2325-4180-85b9-72a2b2fefe9f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"bdde8f20-2325-4180-85b9-72a2b2fefe9f\") " pod="openstack/ceilometer-0" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.481932 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3df2077b-8a01-47ae-ad22-abfc02071c24-scripts\") pod \"cloudkitty-db-sync-wh2hr\" (UID: \"3df2077b-8a01-47ae-ad22-abfc02071c24\") " pod="openstack/cloudkitty-db-sync-wh2hr" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.481963 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bdde8f20-2325-4180-85b9-72a2b2fefe9f-run-httpd\") pod \"ceilometer-0\" (UID: \"bdde8f20-2325-4180-85b9-72a2b2fefe9f\") " pod="openstack/ceilometer-0" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.481980 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bdde8f20-2325-4180-85b9-72a2b2fefe9f-config-data\") pod \"ceilometer-0\" (UID: \"bdde8f20-2325-4180-85b9-72a2b2fefe9f\") " pod="openstack/ceilometer-0" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.481994 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/477f2280-d198-47f0-8f7a-cd76106d9f35-combined-ca-bundle\") pod \"barbican-db-sync-dc5j7\" (UID: 
\"477f2280-d198-47f0-8f7a-cd76106d9f35\") " pod="openstack/barbican-db-sync-dc5j7" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.482024 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/3df2077b-8a01-47ae-ad22-abfc02071c24-certs\") pod \"cloudkitty-db-sync-wh2hr\" (UID: \"3df2077b-8a01-47ae-ad22-abfc02071c24\") " pod="openstack/cloudkitty-db-sync-wh2hr" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.482049 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3df2077b-8a01-47ae-ad22-abfc02071c24-config-data\") pod \"cloudkitty-db-sync-wh2hr\" (UID: \"3df2077b-8a01-47ae-ad22-abfc02071c24\") " pod="openstack/cloudkitty-db-sync-wh2hr" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.482070 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bdde8f20-2325-4180-85b9-72a2b2fefe9f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"bdde8f20-2325-4180-85b9-72a2b2fefe9f\") " pod="openstack/ceilometer-0" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.482093 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/477f2280-d198-47f0-8f7a-cd76106d9f35-db-sync-config-data\") pod \"barbican-db-sync-dc5j7\" (UID: \"477f2280-d198-47f0-8f7a-cd76106d9f35\") " pod="openstack/barbican-db-sync-dc5j7" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.482108 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dlhvk\" (UniqueName: \"kubernetes.io/projected/477f2280-d198-47f0-8f7a-cd76106d9f35-kube-api-access-dlhvk\") pod \"barbican-db-sync-dc5j7\" (UID: \"477f2280-d198-47f0-8f7a-cd76106d9f35\") " pod="openstack/barbican-db-sync-dc5j7" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.482126 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bdde8f20-2325-4180-85b9-72a2b2fefe9f-log-httpd\") pod \"ceilometer-0\" (UID: \"bdde8f20-2325-4180-85b9-72a2b2fefe9f\") " pod="openstack/ceilometer-0" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.482159 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-km57l\" (UniqueName: \"kubernetes.io/projected/3df2077b-8a01-47ae-ad22-abfc02071c24-kube-api-access-km57l\") pod \"cloudkitty-db-sync-wh2hr\" (UID: \"3df2077b-8a01-47ae-ad22-abfc02071c24\") " pod="openstack/cloudkitty-db-sync-wh2hr" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.482177 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3df2077b-8a01-47ae-ad22-abfc02071c24-combined-ca-bundle\") pod \"cloudkitty-db-sync-wh2hr\" (UID: \"3df2077b-8a01-47ae-ad22-abfc02071c24\") " pod="openstack/cloudkitty-db-sync-wh2hr" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.489803 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3df2077b-8a01-47ae-ad22-abfc02071c24-combined-ca-bundle\") pod \"cloudkitty-db-sync-wh2hr\" (UID: \"3df2077b-8a01-47ae-ad22-abfc02071c24\") " pod="openstack/cloudkitty-db-sync-wh2hr" Dec 02 18:57:53 crc 
kubenswrapper[4792]: I1202 18:57:53.490360 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.491620 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/3df2077b-8a01-47ae-ad22-abfc02071c24-certs\") pod \"cloudkitty-db-sync-wh2hr\" (UID: \"3df2077b-8a01-47ae-ad22-abfc02071c24\") " pod="openstack/cloudkitty-db-sync-wh2hr" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.492999 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3df2077b-8a01-47ae-ad22-abfc02071c24-scripts\") pod \"cloudkitty-db-sync-wh2hr\" (UID: \"3df2077b-8a01-47ae-ad22-abfc02071c24\") " pod="openstack/cloudkitty-db-sync-wh2hr" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.501699 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3df2077b-8a01-47ae-ad22-abfc02071c24-config-data\") pod \"cloudkitty-db-sync-wh2hr\" (UID: \"3df2077b-8a01-47ae-ad22-abfc02071c24\") " pod="openstack/cloudkitty-db-sync-wh2hr" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.526245 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-km57l\" (UniqueName: \"kubernetes.io/projected/3df2077b-8a01-47ae-ad22-abfc02071c24-kube-api-access-km57l\") pod \"cloudkitty-db-sync-wh2hr\" (UID: \"3df2077b-8a01-47ae-ad22-abfc02071c24\") " pod="openstack/cloudkitty-db-sync-wh2hr" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.592388 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bdde8f20-2325-4180-85b9-72a2b2fefe9f-run-httpd\") pod \"ceilometer-0\" (UID: \"bdde8f20-2325-4180-85b9-72a2b2fefe9f\") " pod="openstack/ceilometer-0" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.592418 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/477f2280-d198-47f0-8f7a-cd76106d9f35-combined-ca-bundle\") pod \"barbican-db-sync-dc5j7\" (UID: \"477f2280-d198-47f0-8f7a-cd76106d9f35\") " pod="openstack/barbican-db-sync-dc5j7" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.592435 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bdde8f20-2325-4180-85b9-72a2b2fefe9f-config-data\") pod \"ceilometer-0\" (UID: \"bdde8f20-2325-4180-85b9-72a2b2fefe9f\") " pod="openstack/ceilometer-0" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.592482 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bdde8f20-2325-4180-85b9-72a2b2fefe9f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"bdde8f20-2325-4180-85b9-72a2b2fefe9f\") " pod="openstack/ceilometer-0" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.592497 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/477f2280-d198-47f0-8f7a-cd76106d9f35-db-sync-config-data\") pod \"barbican-db-sync-dc5j7\" (UID: \"477f2280-d198-47f0-8f7a-cd76106d9f35\") " pod="openstack/barbican-db-sync-dc5j7" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.592512 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-dlhvk\" (UniqueName: \"kubernetes.io/projected/477f2280-d198-47f0-8f7a-cd76106d9f35-kube-api-access-dlhvk\") pod \"barbican-db-sync-dc5j7\" (UID: \"477f2280-d198-47f0-8f7a-cd76106d9f35\") " pod="openstack/barbican-db-sync-dc5j7" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.592545 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bdde8f20-2325-4180-85b9-72a2b2fefe9f-log-httpd\") pod \"ceilometer-0\" (UID: \"bdde8f20-2325-4180-85b9-72a2b2fefe9f\") " pod="openstack/ceilometer-0" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.592602 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-czpkc\" (UniqueName: \"kubernetes.io/projected/bdde8f20-2325-4180-85b9-72a2b2fefe9f-kube-api-access-czpkc\") pod \"ceilometer-0\" (UID: \"bdde8f20-2325-4180-85b9-72a2b2fefe9f\") " pod="openstack/ceilometer-0" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.592618 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bdde8f20-2325-4180-85b9-72a2b2fefe9f-scripts\") pod \"ceilometer-0\" (UID: \"bdde8f20-2325-4180-85b9-72a2b2fefe9f\") " pod="openstack/ceilometer-0" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.592650 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bdde8f20-2325-4180-85b9-72a2b2fefe9f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"bdde8f20-2325-4180-85b9-72a2b2fefe9f\") " pod="openstack/ceilometer-0" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.606328 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bdde8f20-2325-4180-85b9-72a2b2fefe9f-run-httpd\") pod \"ceilometer-0\" (UID: \"bdde8f20-2325-4180-85b9-72a2b2fefe9f\") " pod="openstack/ceilometer-0" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.606952 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bdde8f20-2325-4180-85b9-72a2b2fefe9f-log-httpd\") pod \"ceilometer-0\" (UID: \"bdde8f20-2325-4180-85b9-72a2b2fefe9f\") " pod="openstack/ceilometer-0" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.612836 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-587gx"] Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.616696 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-db-sync-wh2hr" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.630698 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-czpkc\" (UniqueName: \"kubernetes.io/projected/bdde8f20-2325-4180-85b9-72a2b2fefe9f-kube-api-access-czpkc\") pod \"ceilometer-0\" (UID: \"bdde8f20-2325-4180-85b9-72a2b2fefe9f\") " pod="openstack/ceilometer-0" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.637342 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dlhvk\" (UniqueName: \"kubernetes.io/projected/477f2280-d198-47f0-8f7a-cd76106d9f35-kube-api-access-dlhvk\") pod \"barbican-db-sync-dc5j7\" (UID: \"477f2280-d198-47f0-8f7a-cd76106d9f35\") " pod="openstack/barbican-db-sync-dc5j7" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.640463 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bdde8f20-2325-4180-85b9-72a2b2fefe9f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"bdde8f20-2325-4180-85b9-72a2b2fefe9f\") " pod="openstack/ceilometer-0" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.645941 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/477f2280-d198-47f0-8f7a-cd76106d9f35-db-sync-config-data\") pod \"barbican-db-sync-dc5j7\" (UID: \"477f2280-d198-47f0-8f7a-cd76106d9f35\") " pod="openstack/barbican-db-sync-dc5j7" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.659784 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/477f2280-d198-47f0-8f7a-cd76106d9f35-combined-ca-bundle\") pod \"barbican-db-sync-dc5j7\" (UID: \"477f2280-d198-47f0-8f7a-cd76106d9f35\") " pod="openstack/barbican-db-sync-dc5j7" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.660517 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bdde8f20-2325-4180-85b9-72a2b2fefe9f-scripts\") pod \"ceilometer-0\" (UID: \"bdde8f20-2325-4180-85b9-72a2b2fefe9f\") " pod="openstack/ceilometer-0" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.669403 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bdde8f20-2325-4180-85b9-72a2b2fefe9f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"bdde8f20-2325-4180-85b9-72a2b2fefe9f\") " pod="openstack/ceilometer-0" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.670314 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bdde8f20-2325-4180-85b9-72a2b2fefe9f-config-data\") pod \"ceilometer-0\" (UID: \"bdde8f20-2325-4180-85b9-72a2b2fefe9f\") " pod="openstack/ceilometer-0" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.679709 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-847c4cc679-2rdm4"] Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.679873 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-587gx" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.683008 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-p2hl6" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.683275 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.683396 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.699869 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-74f6bcbc87-7f9pb" podUID="ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97" containerName="dnsmasq-dns" containerID="cri-o://f3346e6b67588344f2ab3cab3754cc0355ebb8c01b1e0bcf6628832a9b6905b4" gracePeriod=10 Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.718401 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-587gx"] Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.748016 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-pc89n"] Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.761897 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-pc89n" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.778107 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-pc89n"] Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.805924 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cc372579-8ed3-41c8-9a89-6e3c026e3d6a-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-pc89n\" (UID: \"cc372579-8ed3-41c8-9a89-6e3c026e3d6a\") " pod="openstack/dnsmasq-dns-785d8bcb8c-pc89n" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.805972 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.805986 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/617ad28d-5e59-4407-91e7-740824d3ce43-config-data\") pod \"placement-db-sync-587gx\" (UID: \"617ad28d-5e59-4407-91e7-740824d3ce43\") " pod="openstack/placement-db-sync-587gx" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.806018 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cc372579-8ed3-41c8-9a89-6e3c026e3d6a-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-pc89n\" (UID: \"cc372579-8ed3-41c8-9a89-6e3c026e3d6a\") " pod="openstack/dnsmasq-dns-785d8bcb8c-pc89n" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.806079 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/617ad28d-5e59-4407-91e7-740824d3ce43-combined-ca-bundle\") pod \"placement-db-sync-587gx\" (UID: \"617ad28d-5e59-4407-91e7-740824d3ce43\") " pod="openstack/placement-db-sync-587gx" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.806139 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxh2k\" (UniqueName: \"kubernetes.io/projected/cc372579-8ed3-41c8-9a89-6e3c026e3d6a-kube-api-access-pxh2k\") pod \"dnsmasq-dns-785d8bcb8c-pc89n\" (UID: \"cc372579-8ed3-41c8-9a89-6e3c026e3d6a\") " pod="openstack/dnsmasq-dns-785d8bcb8c-pc89n" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.806160 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cc372579-8ed3-41c8-9a89-6e3c026e3d6a-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-pc89n\" (UID: \"cc372579-8ed3-41c8-9a89-6e3c026e3d6a\") " pod="openstack/dnsmasq-dns-785d8bcb8c-pc89n" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.806198 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrrqv\" (UniqueName: \"kubernetes.io/projected/617ad28d-5e59-4407-91e7-740824d3ce43-kube-api-access-qrrqv\") pod \"placement-db-sync-587gx\" (UID: \"617ad28d-5e59-4407-91e7-740824d3ce43\") " pod="openstack/placement-db-sync-587gx" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.806213 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cc372579-8ed3-41c8-9a89-6e3c026e3d6a-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-pc89n\" (UID: \"cc372579-8ed3-41c8-9a89-6e3c026e3d6a\") " pod="openstack/dnsmasq-dns-785d8bcb8c-pc89n" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.806254 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/617ad28d-5e59-4407-91e7-740824d3ce43-logs\") pod \"placement-db-sync-587gx\" (UID: \"617ad28d-5e59-4407-91e7-740824d3ce43\") " pod="openstack/placement-db-sync-587gx" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.806272 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc372579-8ed3-41c8-9a89-6e3c026e3d6a-config\") pod 
\"dnsmasq-dns-785d8bcb8c-pc89n\" (UID: \"cc372579-8ed3-41c8-9a89-6e3c026e3d6a\") " pod="openstack/dnsmasq-dns-785d8bcb8c-pc89n" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.806290 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/617ad28d-5e59-4407-91e7-740824d3ce43-scripts\") pod \"placement-db-sync-587gx\" (UID: \"617ad28d-5e59-4407-91e7-740824d3ce43\") " pod="openstack/placement-db-sync-587gx" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.860804 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-dc5j7" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.910101 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cc372579-8ed3-41c8-9a89-6e3c026e3d6a-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-pc89n\" (UID: \"cc372579-8ed3-41c8-9a89-6e3c026e3d6a\") " pod="openstack/dnsmasq-dns-785d8bcb8c-pc89n" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.911341 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cc372579-8ed3-41c8-9a89-6e3c026e3d6a-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-pc89n\" (UID: \"cc372579-8ed3-41c8-9a89-6e3c026e3d6a\") " pod="openstack/dnsmasq-dns-785d8bcb8c-pc89n" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.911408 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/617ad28d-5e59-4407-91e7-740824d3ce43-config-data\") pod \"placement-db-sync-587gx\" (UID: \"617ad28d-5e59-4407-91e7-740824d3ce43\") " pod="openstack/placement-db-sync-587gx" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.911433 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cc372579-8ed3-41c8-9a89-6e3c026e3d6a-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-pc89n\" (UID: \"cc372579-8ed3-41c8-9a89-6e3c026e3d6a\") " pod="openstack/dnsmasq-dns-785d8bcb8c-pc89n" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.911495 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/617ad28d-5e59-4407-91e7-740824d3ce43-combined-ca-bundle\") pod \"placement-db-sync-587gx\" (UID: \"617ad28d-5e59-4407-91e7-740824d3ce43\") " pod="openstack/placement-db-sync-587gx" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.911581 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxh2k\" (UniqueName: \"kubernetes.io/projected/cc372579-8ed3-41c8-9a89-6e3c026e3d6a-kube-api-access-pxh2k\") pod \"dnsmasq-dns-785d8bcb8c-pc89n\" (UID: \"cc372579-8ed3-41c8-9a89-6e3c026e3d6a\") " pod="openstack/dnsmasq-dns-785d8bcb8c-pc89n" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.911601 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cc372579-8ed3-41c8-9a89-6e3c026e3d6a-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-pc89n\" (UID: \"cc372579-8ed3-41c8-9a89-6e3c026e3d6a\") " pod="openstack/dnsmasq-dns-785d8bcb8c-pc89n" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.911651 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-qrrqv\" (UniqueName: \"kubernetes.io/projected/617ad28d-5e59-4407-91e7-740824d3ce43-kube-api-access-qrrqv\") pod \"placement-db-sync-587gx\" (UID: \"617ad28d-5e59-4407-91e7-740824d3ce43\") " pod="openstack/placement-db-sync-587gx" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.911666 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cc372579-8ed3-41c8-9a89-6e3c026e3d6a-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-pc89n\" (UID: \"cc372579-8ed3-41c8-9a89-6e3c026e3d6a\") " pod="openstack/dnsmasq-dns-785d8bcb8c-pc89n" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.911706 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/617ad28d-5e59-4407-91e7-740824d3ce43-logs\") pod \"placement-db-sync-587gx\" (UID: \"617ad28d-5e59-4407-91e7-740824d3ce43\") " pod="openstack/placement-db-sync-587gx" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.911726 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc372579-8ed3-41c8-9a89-6e3c026e3d6a-config\") pod \"dnsmasq-dns-785d8bcb8c-pc89n\" (UID: \"cc372579-8ed3-41c8-9a89-6e3c026e3d6a\") " pod="openstack/dnsmasq-dns-785d8bcb8c-pc89n" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.911742 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/617ad28d-5e59-4407-91e7-740824d3ce43-scripts\") pod \"placement-db-sync-587gx\" (UID: \"617ad28d-5e59-4407-91e7-740824d3ce43\") " pod="openstack/placement-db-sync-587gx" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.912887 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cc372579-8ed3-41c8-9a89-6e3c026e3d6a-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-pc89n\" (UID: \"cc372579-8ed3-41c8-9a89-6e3c026e3d6a\") " pod="openstack/dnsmasq-dns-785d8bcb8c-pc89n" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.915506 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cc372579-8ed3-41c8-9a89-6e3c026e3d6a-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-pc89n\" (UID: \"cc372579-8ed3-41c8-9a89-6e3c026e3d6a\") " pod="openstack/dnsmasq-dns-785d8bcb8c-pc89n" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.916204 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cc372579-8ed3-41c8-9a89-6e3c026e3d6a-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-pc89n\" (UID: \"cc372579-8ed3-41c8-9a89-6e3c026e3d6a\") " pod="openstack/dnsmasq-dns-785d8bcb8c-pc89n" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.917779 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc372579-8ed3-41c8-9a89-6e3c026e3d6a-config\") pod \"dnsmasq-dns-785d8bcb8c-pc89n\" (UID: \"cc372579-8ed3-41c8-9a89-6e3c026e3d6a\") " pod="openstack/dnsmasq-dns-785d8bcb8c-pc89n" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.930758 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/617ad28d-5e59-4407-91e7-740824d3ce43-logs\") pod \"placement-db-sync-587gx\" (UID: \"617ad28d-5e59-4407-91e7-740824d3ce43\") " 
pod="openstack/placement-db-sync-587gx" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.937252 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/617ad28d-5e59-4407-91e7-740824d3ce43-combined-ca-bundle\") pod \"placement-db-sync-587gx\" (UID: \"617ad28d-5e59-4407-91e7-740824d3ce43\") " pod="openstack/placement-db-sync-587gx" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.942318 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pxh2k\" (UniqueName: \"kubernetes.io/projected/cc372579-8ed3-41c8-9a89-6e3c026e3d6a-kube-api-access-pxh2k\") pod \"dnsmasq-dns-785d8bcb8c-pc89n\" (UID: \"cc372579-8ed3-41c8-9a89-6e3c026e3d6a\") " pod="openstack/dnsmasq-dns-785d8bcb8c-pc89n" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.943017 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/617ad28d-5e59-4407-91e7-740824d3ce43-config-data\") pod \"placement-db-sync-587gx\" (UID: \"617ad28d-5e59-4407-91e7-740824d3ce43\") " pod="openstack/placement-db-sync-587gx" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.946911 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrrqv\" (UniqueName: \"kubernetes.io/projected/617ad28d-5e59-4407-91e7-740824d3ce43-kube-api-access-qrrqv\") pod \"placement-db-sync-587gx\" (UID: \"617ad28d-5e59-4407-91e7-740824d3ce43\") " pod="openstack/placement-db-sync-587gx" Dec 02 18:57:53 crc kubenswrapper[4792]: I1202 18:57:53.955457 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/617ad28d-5e59-4407-91e7-740824d3ce43-scripts\") pod \"placement-db-sync-587gx\" (UID: \"617ad28d-5e59-4407-91e7-740824d3ce43\") " pod="openstack/placement-db-sync-587gx" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.019498 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-587gx" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.074481 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.076220 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.080338 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.080678 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.080905 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-6m7wg" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.083699 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.095842 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-hxh97"] Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.111839 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.118944 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb410397-57d4-441b-b612-0f05926cc480-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"cb410397-57d4-441b-b612-0f05926cc480\") " pod="openstack/glance-default-external-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.118992 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb410397-57d4-441b-b612-0f05926cc480-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"cb410397-57d4-441b-b612-0f05926cc480\") " pod="openstack/glance-default-external-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.119069 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wp6jx\" (UniqueName: \"kubernetes.io/projected/cb410397-57d4-441b-b612-0f05926cc480-kube-api-access-wp6jx\") pod \"glance-default-external-api-0\" (UID: \"cb410397-57d4-441b-b612-0f05926cc480\") " pod="openstack/glance-default-external-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.119111 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-8b2507f2-3d4c-435e-95ab-efbbd47db9d7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8b2507f2-3d4c-435e-95ab-efbbd47db9d7\") pod \"glance-default-external-api-0\" (UID: \"cb410397-57d4-441b-b612-0f05926cc480\") " pod="openstack/glance-default-external-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.119151 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cb410397-57d4-441b-b612-0f05926cc480-logs\") pod \"glance-default-external-api-0\" (UID: \"cb410397-57d4-441b-b612-0f05926cc480\") " pod="openstack/glance-default-external-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.119196 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cb410397-57d4-441b-b612-0f05926cc480-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"cb410397-57d4-441b-b612-0f05926cc480\") " 
pod="openstack/glance-default-external-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.119224 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb410397-57d4-441b-b612-0f05926cc480-scripts\") pod \"glance-default-external-api-0\" (UID: \"cb410397-57d4-441b-b612-0f05926cc480\") " pod="openstack/glance-default-external-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.119260 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb410397-57d4-441b-b612-0f05926cc480-config-data\") pod \"glance-default-external-api-0\" (UID: \"cb410397-57d4-441b-b612-0f05926cc480\") " pod="openstack/glance-default-external-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.120756 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-pc89n" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.145636 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.147272 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.149920 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.150002 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.156310 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.223040 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b5005838-4f7e-4571-be44-5a07b3746f37-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"b5005838-4f7e-4571-be44-5a07b3746f37\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.223321 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cb410397-57d4-441b-b612-0f05926cc480-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"cb410397-57d4-441b-b612-0f05926cc480\") " pod="openstack/glance-default-external-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.223343 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b5005838-4f7e-4571-be44-5a07b3746f37-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"b5005838-4f7e-4571-be44-5a07b3746f37\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.223373 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5005838-4f7e-4571-be44-5a07b3746f37-config-data\") pod \"glance-default-internal-api-0\" (UID: \"b5005838-4f7e-4571-be44-5a07b3746f37\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.223393 
4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb410397-57d4-441b-b612-0f05926cc480-scripts\") pod \"glance-default-external-api-0\" (UID: \"cb410397-57d4-441b-b612-0f05926cc480\") " pod="openstack/glance-default-external-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.223434 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jw49l\" (UniqueName: \"kubernetes.io/projected/b5005838-4f7e-4571-be44-5a07b3746f37-kube-api-access-jw49l\") pod \"glance-default-internal-api-0\" (UID: \"b5005838-4f7e-4571-be44-5a07b3746f37\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.223459 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b5005838-4f7e-4571-be44-5a07b3746f37-scripts\") pod \"glance-default-internal-api-0\" (UID: \"b5005838-4f7e-4571-be44-5a07b3746f37\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.223486 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb410397-57d4-441b-b612-0f05926cc480-config-data\") pod \"glance-default-external-api-0\" (UID: \"cb410397-57d4-441b-b612-0f05926cc480\") " pod="openstack/glance-default-external-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.223534 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb410397-57d4-441b-b612-0f05926cc480-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"cb410397-57d4-441b-b612-0f05926cc480\") " pod="openstack/glance-default-external-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.223558 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb410397-57d4-441b-b612-0f05926cc480-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"cb410397-57d4-441b-b612-0f05926cc480\") " pod="openstack/glance-default-external-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.223588 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5005838-4f7e-4571-be44-5a07b3746f37-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"b5005838-4f7e-4571-be44-5a07b3746f37\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.223634 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-7a6cbd4c-785c-4ada-876d-0f0fb655df45\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7a6cbd4c-785c-4ada-876d-0f0fb655df45\") pod \"glance-default-internal-api-0\" (UID: \"b5005838-4f7e-4571-be44-5a07b3746f37\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.223658 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wp6jx\" (UniqueName: \"kubernetes.io/projected/cb410397-57d4-441b-b612-0f05926cc480-kube-api-access-wp6jx\") pod \"glance-default-external-api-0\" (UID: \"cb410397-57d4-441b-b612-0f05926cc480\") " 
pod="openstack/glance-default-external-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.223689 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b5005838-4f7e-4571-be44-5a07b3746f37-logs\") pod \"glance-default-internal-api-0\" (UID: \"b5005838-4f7e-4571-be44-5a07b3746f37\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.223721 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-8b2507f2-3d4c-435e-95ab-efbbd47db9d7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8b2507f2-3d4c-435e-95ab-efbbd47db9d7\") pod \"glance-default-external-api-0\" (UID: \"cb410397-57d4-441b-b612-0f05926cc480\") " pod="openstack/glance-default-external-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.223745 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cb410397-57d4-441b-b612-0f05926cc480-logs\") pod \"glance-default-external-api-0\" (UID: \"cb410397-57d4-441b-b612-0f05926cc480\") " pod="openstack/glance-default-external-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.224245 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cb410397-57d4-441b-b612-0f05926cc480-logs\") pod \"glance-default-external-api-0\" (UID: \"cb410397-57d4-441b-b612-0f05926cc480\") " pod="openstack/glance-default-external-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.225026 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cb410397-57d4-441b-b612-0f05926cc480-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"cb410397-57d4-441b-b612-0f05926cc480\") " pod="openstack/glance-default-external-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.234166 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb410397-57d4-441b-b612-0f05926cc480-config-data\") pod \"glance-default-external-api-0\" (UID: \"cb410397-57d4-441b-b612-0f05926cc480\") " pod="openstack/glance-default-external-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.237078 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb410397-57d4-441b-b612-0f05926cc480-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"cb410397-57d4-441b-b612-0f05926cc480\") " pod="openstack/glance-default-external-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.237402 4792 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.237458 4792 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-8b2507f2-3d4c-435e-95ab-efbbd47db9d7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8b2507f2-3d4c-435e-95ab-efbbd47db9d7\") pod \"glance-default-external-api-0\" (UID: \"cb410397-57d4-441b-b612-0f05926cc480\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/7340b972542f32c74446fd2d8820f10387f1320bf84336609d21f0bd206378e7/globalmount\"" pod="openstack/glance-default-external-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.237704 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb410397-57d4-441b-b612-0f05926cc480-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"cb410397-57d4-441b-b612-0f05926cc480\") " pod="openstack/glance-default-external-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.256146 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb410397-57d4-441b-b612-0f05926cc480-scripts\") pod \"glance-default-external-api-0\" (UID: \"cb410397-57d4-441b-b612-0f05926cc480\") " pod="openstack/glance-default-external-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.264000 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wp6jx\" (UniqueName: \"kubernetes.io/projected/cb410397-57d4-441b-b612-0f05926cc480-kube-api-access-wp6jx\") pod \"glance-default-external-api-0\" (UID: \"cb410397-57d4-441b-b612-0f05926cc480\") " pod="openstack/glance-default-external-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.272137 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-847c4cc679-2rdm4"] Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.299430 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-8b2507f2-3d4c-435e-95ab-efbbd47db9d7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8b2507f2-3d4c-435e-95ab-efbbd47db9d7\") pod \"glance-default-external-api-0\" (UID: \"cb410397-57d4-441b-b612-0f05926cc480\") " pod="openstack/glance-default-external-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.330845 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b5005838-4f7e-4571-be44-5a07b3746f37-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"b5005838-4f7e-4571-be44-5a07b3746f37\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.330888 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b5005838-4f7e-4571-be44-5a07b3746f37-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"b5005838-4f7e-4571-be44-5a07b3746f37\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.330904 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5005838-4f7e-4571-be44-5a07b3746f37-config-data\") pod \"glance-default-internal-api-0\" (UID: \"b5005838-4f7e-4571-be44-5a07b3746f37\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 
18:57:54.330940 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jw49l\" (UniqueName: \"kubernetes.io/projected/b5005838-4f7e-4571-be44-5a07b3746f37-kube-api-access-jw49l\") pod \"glance-default-internal-api-0\" (UID: \"b5005838-4f7e-4571-be44-5a07b3746f37\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.330957 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b5005838-4f7e-4571-be44-5a07b3746f37-scripts\") pod \"glance-default-internal-api-0\" (UID: \"b5005838-4f7e-4571-be44-5a07b3746f37\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.333606 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5005838-4f7e-4571-be44-5a07b3746f37-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"b5005838-4f7e-4571-be44-5a07b3746f37\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.333685 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-7a6cbd4c-785c-4ada-876d-0f0fb655df45\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7a6cbd4c-785c-4ada-876d-0f0fb655df45\") pod \"glance-default-internal-api-0\" (UID: \"b5005838-4f7e-4571-be44-5a07b3746f37\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.333729 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b5005838-4f7e-4571-be44-5a07b3746f37-logs\") pod \"glance-default-internal-api-0\" (UID: \"b5005838-4f7e-4571-be44-5a07b3746f37\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.334320 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b5005838-4f7e-4571-be44-5a07b3746f37-logs\") pod \"glance-default-internal-api-0\" (UID: \"b5005838-4f7e-4571-be44-5a07b3746f37\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.334546 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b5005838-4f7e-4571-be44-5a07b3746f37-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"b5005838-4f7e-4571-be44-5a07b3746f37\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.340632 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b5005838-4f7e-4571-be44-5a07b3746f37-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"b5005838-4f7e-4571-be44-5a07b3746f37\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.344984 4792 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
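The "MountVolume.MountDevice succeeded ... device mount path" entries show where each PVC is staged: /var/lib/kubelet/plugins/kubernetes.io/csi/<driver>/<hash>/globalmount. A small sketch of how such a path can be derived, under the assumption (true for recent kubelets, but an assumption here, not something this log confirms) that the hash segment is the SHA-256 of the CSI volume handle:

    // Sketch only: reconstructing the staging ("globalmount") path format
    // seen in the MountDevice entries above. The sha256(volumeHandle)
    // hash rule is an assumption.
    package main

    import (
        "crypto/sha256"
        "fmt"
        "path/filepath"
    )

    func globalMountPath(kubeletRoot, driver, volumeHandle string) string {
        sum := sha256.Sum256([]byte(volumeHandle))
        return filepath.Join(kubeletRoot, "plugins", "kubernetes.io", "csi",
            driver, fmt.Sprintf("%x", sum), "globalmount")
    }

    func main() {
        // Volume handle taken from the log above; if the assumption holds,
        // the printed hash matches the one in the MountDevice entry.
        fmt.Println(globalMountPath("/var/lib/kubelet",
            "kubevirt.io.hostpath-provisioner",
            "pvc-7a6cbd4c-785c-4ada-876d-0f0fb655df45"))
    }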
Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.345069 4792 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-7a6cbd4c-785c-4ada-876d-0f0fb655df45\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7a6cbd4c-785c-4ada-876d-0f0fb655df45\") pod \"glance-default-internal-api-0\" (UID: \"b5005838-4f7e-4571-be44-5a07b3746f37\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/80e64c4b1f5f631bf26b32fa972d35244bcdcbb9d2d00ddd0ab5edaa6a730928/globalmount\"" pod="openstack/glance-default-internal-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.349385 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5005838-4f7e-4571-be44-5a07b3746f37-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"b5005838-4f7e-4571-be44-5a07b3746f37\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.349577 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5005838-4f7e-4571-be44-5a07b3746f37-config-data\") pod \"glance-default-internal-api-0\" (UID: \"b5005838-4f7e-4571-be44-5a07b3746f37\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.354057 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b5005838-4f7e-4571-be44-5a07b3746f37-scripts\") pod \"glance-default-internal-api-0\" (UID: \"b5005838-4f7e-4571-be44-5a07b3746f37\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.380137 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jw49l\" (UniqueName: \"kubernetes.io/projected/b5005838-4f7e-4571-be44-5a07b3746f37-kube-api-access-jw49l\") pod \"glance-default-internal-api-0\" (UID: \"b5005838-4f7e-4571-be44-5a07b3746f37\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.395715 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.431398 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-7a6cbd4c-785c-4ada-876d-0f0fb655df45\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7a6cbd4c-785c-4ada-876d-0f0fb655df45\") pod \"glance-default-internal-api-0\" (UID: \"b5005838-4f7e-4571-be44-5a07b3746f37\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.568318 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-t5xgj"] Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.612249 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-sync-wh2hr"] Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.635908 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-zmnrf"] Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.658143 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-dc5j7"] Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.666389 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.711448 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-847c4cc679-2rdm4" event={"ID":"5d07f699-63a8-4c42-a0d9-3481700d8a20","Type":"ContainerStarted","Data":"6732ef9e7f462133f468fb2b17a8319fb72e8609a15c96dab81ab8cae2595783"} Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.715636 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-wh2hr" event={"ID":"3df2077b-8a01-47ae-ad22-abfc02071c24","Type":"ContainerStarted","Data":"48f282c063107b45771188d8b610cc5864a27c291988453d1d2346276a04253a"} Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.719485 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-zmnrf" event={"ID":"13ec79b9-9006-473e-a8c9-e0cc9069d983","Type":"ContainerStarted","Data":"5a6ca6eba2140733712ebd357afde1be644fb48b4049983310adb389737be8a3"} Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.723488 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-hxh97" event={"ID":"0eb1ec35-0b74-4b18-9c17-63f1f13dccaa","Type":"ContainerStarted","Data":"4ff1759a6625a7b333fdb8b969b00f2a621ecea333db2b9dda259838d75df17d"} Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.729224 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.730732 4792 generic.go:334] "Generic (PLEG): container finished" podID="ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97" containerID="f3346e6b67588344f2ab3cab3754cc0355ebb8c01b1e0bcf6628832a9b6905b4" exitCode=0 Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.730847 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f6bcbc87-7f9pb" event={"ID":"ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97","Type":"ContainerDied","Data":"f3346e6b67588344f2ab3cab3754cc0355ebb8c01b1e0bcf6628832a9b6905b4"} Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.738036 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-dc5j7" event={"ID":"477f2280-d198-47f0-8f7a-cd76106d9f35","Type":"ContainerStarted","Data":"22619db8e9ad3a2969455359d4404e955863d6429ad6b597ddaf2de2359fb44d"} Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.740431 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bdde8f20-2325-4180-85b9-72a2b2fefe9f","Type":"ContainerStarted","Data":"914fcd4638c39e6fc6fa59a75a0bd773febe5625ba4e542b1afa29ec76574e69"} Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.741992 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-t5xgj" event={"ID":"50f2a0fa-fcf2-4f6c-be51-b78ae811fce5","Type":"ContainerStarted","Data":"a07d2672e64572a33a45e5d32e8e54e381e3ba86d071dab5c9b0a9eabf92fa1b"} Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.865982 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-587gx"] Dec 02 18:57:54 crc kubenswrapper[4792]: I1202 18:57:54.976031 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-pc89n"] Dec 02 18:57:55 crc kubenswrapper[4792]: I1202 18:57:55.058589 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-74f6bcbc87-7f9pb" Dec 02 18:57:55 crc kubenswrapper[4792]: I1202 18:57:55.168739 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97-dns-svc\") pod \"ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97\" (UID: \"ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97\") " Dec 02 18:57:55 crc kubenswrapper[4792]: I1202 18:57:55.169112 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97-dns-swift-storage-0\") pod \"ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97\" (UID: \"ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97\") " Dec 02 18:57:55 crc kubenswrapper[4792]: I1202 18:57:55.169235 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97-ovsdbserver-sb\") pod \"ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97\" (UID: \"ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97\") " Dec 02 18:57:55 crc kubenswrapper[4792]: I1202 18:57:55.169295 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97-ovsdbserver-nb\") pod \"ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97\" (UID: \"ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97\") " Dec 02 18:57:55 crc kubenswrapper[4792]: I1202 18:57:55.169328 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97-config\") pod \"ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97\" (UID: \"ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97\") " Dec 02 18:57:55 crc kubenswrapper[4792]: I1202 18:57:55.169344 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2btlt\" (UniqueName: \"kubernetes.io/projected/ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97-kube-api-access-2btlt\") pod \"ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97\" (UID: \"ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97\") " Dec 02 18:57:55 crc kubenswrapper[4792]: I1202 18:57:55.191679 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97-kube-api-access-2btlt" (OuterVolumeSpecName: "kube-api-access-2btlt") pod "ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97" (UID: "ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97"). InnerVolumeSpecName "kube-api-access-2btlt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:57:55 crc kubenswrapper[4792]: I1202 18:57:55.271794 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2btlt\" (UniqueName: \"kubernetes.io/projected/ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97-kube-api-access-2btlt\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:55 crc kubenswrapper[4792]: I1202 18:57:55.323695 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97-config" (OuterVolumeSpecName: "config") pod "ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97" (UID: "ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:57:55 crc kubenswrapper[4792]: I1202 18:57:55.327668 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 18:57:55 crc kubenswrapper[4792]: I1202 18:57:55.347606 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97" (UID: "ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:57:55 crc kubenswrapper[4792]: I1202 18:57:55.354222 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97" (UID: "ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:57:55 crc kubenswrapper[4792]: I1202 18:57:55.367338 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97" (UID: "ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:57:55 crc kubenswrapper[4792]: I1202 18:57:55.374095 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:55 crc kubenswrapper[4792]: I1202 18:57:55.374149 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:55 crc kubenswrapper[4792]: I1202 18:57:55.374165 4792 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:55 crc kubenswrapper[4792]: I1202 18:57:55.374178 4792 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:55 crc kubenswrapper[4792]: I1202 18:57:55.384218 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97" (UID: "ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:57:55 crc kubenswrapper[4792]: I1202 18:57:55.476421 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:55 crc kubenswrapper[4792]: I1202 18:57:55.670496 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 18:57:55 crc kubenswrapper[4792]: I1202 18:57:55.777425 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 18:57:55 crc kubenswrapper[4792]: I1202 18:57:55.794330 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 18:57:55 crc kubenswrapper[4792]: I1202 18:57:55.810578 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-587gx" event={"ID":"617ad28d-5e59-4407-91e7-740824d3ce43","Type":"ContainerStarted","Data":"e8cf3ddf89718d86c828fa432a12e73fbf817a24fb7fd92fb84ab47cee966a19"} Dec 02 18:57:55 crc kubenswrapper[4792]: I1202 18:57:55.814415 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-hxh97" event={"ID":"0eb1ec35-0b74-4b18-9c17-63f1f13dccaa","Type":"ContainerStarted","Data":"878a6446d1a442e6e2bca3c1e5afff1787a2da6fa21d22591aa8c78d44356b7f"} Dec 02 18:57:55 crc kubenswrapper[4792]: I1202 18:57:55.818716 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-zmnrf" event={"ID":"13ec79b9-9006-473e-a8c9-e0cc9069d983","Type":"ContainerStarted","Data":"2345285abe3985cfea4ef46be28ca339b0996bf16b15db21a8d1c12e43c278b8"} Dec 02 18:57:55 crc kubenswrapper[4792]: I1202 18:57:55.824492 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f6bcbc87-7f9pb" event={"ID":"ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97","Type":"ContainerDied","Data":"3a3398014a03a6fb20ff52f4befdf26c71bc6fe24dd7dc13e970ec31c022fb53"} Dec 02 18:57:55 crc kubenswrapper[4792]: I1202 18:57:55.824746 4792 scope.go:117] "RemoveContainer" containerID="f3346e6b67588344f2ab3cab3754cc0355ebb8c01b1e0bcf6628832a9b6905b4" Dec 02 18:57:55 crc kubenswrapper[4792]: I1202 18:57:55.825071 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-74f6bcbc87-7f9pb" Dec 02 18:57:55 crc kubenswrapper[4792]: I1202 18:57:55.827785 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b5005838-4f7e-4571-be44-5a07b3746f37","Type":"ContainerStarted","Data":"200b50367f8dfb6b50e51fff7d0d7412b0054114929bc0bc2ca6292181ef2576"} Dec 02 18:57:55 crc kubenswrapper[4792]: I1202 18:57:55.837340 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-hxh97" podStartSLOduration=3.837292348 podStartE2EDuration="3.837292348s" podCreationTimestamp="2025-12-02 18:57:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:57:55.827971676 +0000 UTC m=+1306.600864004" watchObservedRunningTime="2025-12-02 18:57:55.837292348 +0000 UTC m=+1306.610184676" Dec 02 18:57:55 crc kubenswrapper[4792]: I1202 18:57:55.854491 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-zmnrf" podStartSLOduration=2.854476403 podStartE2EDuration="2.854476403s" podCreationTimestamp="2025-12-02 18:57:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:57:55.848294723 +0000 UTC m=+1306.621187051" watchObservedRunningTime="2025-12-02 18:57:55.854476403 +0000 UTC m=+1306.627368731" Dec 02 18:57:55 crc kubenswrapper[4792]: I1202 18:57:55.866473 4792 generic.go:334] "Generic (PLEG): container finished" podID="5d07f699-63a8-4c42-a0d9-3481700d8a20" containerID="3ce0c7d9b59e9e36a6a00f8c0133df463df3cfb100c5524fb303ee75b80538d5" exitCode=0 Dec 02 18:57:55 crc kubenswrapper[4792]: I1202 18:57:55.866559 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-847c4cc679-2rdm4" event={"ID":"5d07f699-63a8-4c42-a0d9-3481700d8a20","Type":"ContainerDied","Data":"3ce0c7d9b59e9e36a6a00f8c0133df463df3cfb100c5524fb303ee75b80538d5"} Dec 02 18:57:55 crc kubenswrapper[4792]: I1202 18:57:55.869414 4792 generic.go:334] "Generic (PLEG): container finished" podID="cc372579-8ed3-41c8-9a89-6e3c026e3d6a" containerID="51c51df0c3cf78cb79ba7c4fd881c56b3ef2efed97e3ab1001a7877c11fdf5f5" exitCode=0 Dec 02 18:57:55 crc kubenswrapper[4792]: I1202 18:57:55.869451 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-pc89n" event={"ID":"cc372579-8ed3-41c8-9a89-6e3c026e3d6a","Type":"ContainerDied","Data":"51c51df0c3cf78cb79ba7c4fd881c56b3ef2efed97e3ab1001a7877c11fdf5f5"} Dec 02 18:57:55 crc kubenswrapper[4792]: I1202 18:57:55.869473 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-pc89n" event={"ID":"cc372579-8ed3-41c8-9a89-6e3c026e3d6a","Type":"ContainerStarted","Data":"5467788e15fb6445e5aae87990e4fb350b1a270d25b7352000db7b55f5934dc3"} Dec 02 18:57:55 crc kubenswrapper[4792]: I1202 18:57:55.870311 4792 scope.go:117] "RemoveContainer" containerID="093c323122e6af23f926d255de7b34a4b91eb0b3e50ca308ff28a77d07ec2074" Dec 02 18:57:55 crc kubenswrapper[4792]: I1202 18:57:55.890892 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74f6bcbc87-7f9pb"] Dec 02 18:57:55 crc kubenswrapper[4792]: I1202 18:57:55.906878 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-74f6bcbc87-7f9pb"] Dec 02 18:57:56 crc kubenswrapper[4792]: I1202 18:57:56.248078 4792 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 18:57:56 crc kubenswrapper[4792]: I1202 18:57:56.533737 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-847c4cc679-2rdm4" Dec 02 18:57:56 crc kubenswrapper[4792]: I1202 18:57:56.608435 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5d07f699-63a8-4c42-a0d9-3481700d8a20-dns-swift-storage-0\") pod \"5d07f699-63a8-4c42-a0d9-3481700d8a20\" (UID: \"5d07f699-63a8-4c42-a0d9-3481700d8a20\") " Dec 02 18:57:56 crc kubenswrapper[4792]: I1202 18:57:56.608513 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lckws\" (UniqueName: \"kubernetes.io/projected/5d07f699-63a8-4c42-a0d9-3481700d8a20-kube-api-access-lckws\") pod \"5d07f699-63a8-4c42-a0d9-3481700d8a20\" (UID: \"5d07f699-63a8-4c42-a0d9-3481700d8a20\") " Dec 02 18:57:56 crc kubenswrapper[4792]: I1202 18:57:56.608611 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5d07f699-63a8-4c42-a0d9-3481700d8a20-dns-svc\") pod \"5d07f699-63a8-4c42-a0d9-3481700d8a20\" (UID: \"5d07f699-63a8-4c42-a0d9-3481700d8a20\") " Dec 02 18:57:56 crc kubenswrapper[4792]: I1202 18:57:56.608653 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d07f699-63a8-4c42-a0d9-3481700d8a20-config\") pod \"5d07f699-63a8-4c42-a0d9-3481700d8a20\" (UID: \"5d07f699-63a8-4c42-a0d9-3481700d8a20\") " Dec 02 18:57:56 crc kubenswrapper[4792]: I1202 18:57:56.608681 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5d07f699-63a8-4c42-a0d9-3481700d8a20-ovsdbserver-nb\") pod \"5d07f699-63a8-4c42-a0d9-3481700d8a20\" (UID: \"5d07f699-63a8-4c42-a0d9-3481700d8a20\") " Dec 02 18:57:56 crc kubenswrapper[4792]: I1202 18:57:56.608739 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5d07f699-63a8-4c42-a0d9-3481700d8a20-ovsdbserver-sb\") pod \"5d07f699-63a8-4c42-a0d9-3481700d8a20\" (UID: \"5d07f699-63a8-4c42-a0d9-3481700d8a20\") " Dec 02 18:57:56 crc kubenswrapper[4792]: I1202 18:57:56.626019 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d07f699-63a8-4c42-a0d9-3481700d8a20-kube-api-access-lckws" (OuterVolumeSpecName: "kube-api-access-lckws") pod "5d07f699-63a8-4c42-a0d9-3481700d8a20" (UID: "5d07f699-63a8-4c42-a0d9-3481700d8a20"). InnerVolumeSpecName "kube-api-access-lckws". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:57:56 crc kubenswrapper[4792]: I1202 18:57:56.643573 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d07f699-63a8-4c42-a0d9-3481700d8a20-config" (OuterVolumeSpecName: "config") pod "5d07f699-63a8-4c42-a0d9-3481700d8a20" (UID: "5d07f699-63a8-4c42-a0d9-3481700d8a20"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:57:56 crc kubenswrapper[4792]: I1202 18:57:56.653763 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d07f699-63a8-4c42-a0d9-3481700d8a20-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5d07f699-63a8-4c42-a0d9-3481700d8a20" (UID: "5d07f699-63a8-4c42-a0d9-3481700d8a20"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:57:56 crc kubenswrapper[4792]: I1202 18:57:56.669662 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d07f699-63a8-4c42-a0d9-3481700d8a20-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "5d07f699-63a8-4c42-a0d9-3481700d8a20" (UID: "5d07f699-63a8-4c42-a0d9-3481700d8a20"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:57:56 crc kubenswrapper[4792]: E1202 18:57:56.684795 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5d07f699-63a8-4c42-a0d9-3481700d8a20-ovsdbserver-sb podName:5d07f699-63a8-4c42-a0d9-3481700d8a20 nodeName:}" failed. No retries permitted until 2025-12-02 18:57:57.184768248 +0000 UTC m=+1307.957660576 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "ovsdbserver-sb" (UniqueName: "kubernetes.io/configmap/5d07f699-63a8-4c42-a0d9-3481700d8a20-ovsdbserver-sb") pod "5d07f699-63a8-4c42-a0d9-3481700d8a20" (UID: "5d07f699-63a8-4c42-a0d9-3481700d8a20") : error deleting /var/lib/kubelet/pods/5d07f699-63a8-4c42-a0d9-3481700d8a20/volume-subpaths: remove /var/lib/kubelet/pods/5d07f699-63a8-4c42-a0d9-3481700d8a20/volume-subpaths: no such file or directory Dec 02 18:57:56 crc kubenswrapper[4792]: I1202 18:57:56.685339 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d07f699-63a8-4c42-a0d9-3481700d8a20-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5d07f699-63a8-4c42-a0d9-3481700d8a20" (UID: "5d07f699-63a8-4c42-a0d9-3481700d8a20"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:57:56 crc kubenswrapper[4792]: I1202 18:57:56.711100 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lckws\" (UniqueName: \"kubernetes.io/projected/5d07f699-63a8-4c42-a0d9-3481700d8a20-kube-api-access-lckws\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:56 crc kubenswrapper[4792]: I1202 18:57:56.711130 4792 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5d07f699-63a8-4c42-a0d9-3481700d8a20-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:56 crc kubenswrapper[4792]: I1202 18:57:56.711145 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d07f699-63a8-4c42-a0d9-3481700d8a20-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:56 crc kubenswrapper[4792]: I1202 18:57:56.711157 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5d07f699-63a8-4c42-a0d9-3481700d8a20-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:56 crc kubenswrapper[4792]: I1202 18:57:56.711170 4792 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5d07f699-63a8-4c42-a0d9-3481700d8a20-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:56 crc kubenswrapper[4792]: I1202 18:57:56.840185 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Dec 02 18:57:56 crc kubenswrapper[4792]: I1202 18:57:56.845565 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Dec 02 18:57:56 crc kubenswrapper[4792]: I1202 18:57:56.893462 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"cb410397-57d4-441b-b612-0f05926cc480","Type":"ContainerStarted","Data":"a63e000560e698cd5693c782757aa62d0c61c26addcfa725df70e3f89b6c8e39"} Dec 02 18:57:56 crc kubenswrapper[4792]: I1202 18:57:56.897976 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-pc89n" event={"ID":"cc372579-8ed3-41c8-9a89-6e3c026e3d6a","Type":"ContainerStarted","Data":"135740db455caf8a7a1724cf126d07518a91f072de7c1c85bc49c3753fc8f292"} Dec 02 18:57:56 crc kubenswrapper[4792]: I1202 18:57:56.898099 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-785d8bcb8c-pc89n" Dec 02 18:57:56 crc kubenswrapper[4792]: I1202 18:57:56.903329 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b5005838-4f7e-4571-be44-5a07b3746f37","Type":"ContainerStarted","Data":"77ded948a82fdaa00ab78550791908cc7ade22cf2abb53668c89c90ce3d372e1"} Dec 02 18:57:56 crc kubenswrapper[4792]: I1202 18:57:56.905190 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-847c4cc679-2rdm4" Dec 02 18:57:56 crc kubenswrapper[4792]: I1202 18:57:56.905344 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-847c4cc679-2rdm4" event={"ID":"5d07f699-63a8-4c42-a0d9-3481700d8a20","Type":"ContainerDied","Data":"6732ef9e7f462133f468fb2b17a8319fb72e8609a15c96dab81ab8cae2595783"} Dec 02 18:57:56 crc kubenswrapper[4792]: I1202 18:57:56.905369 4792 scope.go:117] "RemoveContainer" containerID="3ce0c7d9b59e9e36a6a00f8c0133df463df3cfb100c5524fb303ee75b80538d5" Dec 02 18:57:56 crc kubenswrapper[4792]: I1202 18:57:56.911671 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Dec 02 18:57:56 crc kubenswrapper[4792]: I1202 18:57:56.925106 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-785d8bcb8c-pc89n" podStartSLOduration=3.925091875 podStartE2EDuration="3.925091875s" podCreationTimestamp="2025-12-02 18:57:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:57:56.913202267 +0000 UTC m=+1307.686094595" watchObservedRunningTime="2025-12-02 18:57:56.925091875 +0000 UTC m=+1307.697984203" Dec 02 18:57:57 crc kubenswrapper[4792]: I1202 18:57:57.221416 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5d07f699-63a8-4c42-a0d9-3481700d8a20-ovsdbserver-sb\") pod \"5d07f699-63a8-4c42-a0d9-3481700d8a20\" (UID: \"5d07f699-63a8-4c42-a0d9-3481700d8a20\") " Dec 02 18:57:57 crc kubenswrapper[4792]: I1202 18:57:57.221918 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d07f699-63a8-4c42-a0d9-3481700d8a20-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5d07f699-63a8-4c42-a0d9-3481700d8a20" (UID: "5d07f699-63a8-4c42-a0d9-3481700d8a20"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:57:57 crc kubenswrapper[4792]: I1202 18:57:57.222303 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5d07f699-63a8-4c42-a0d9-3481700d8a20-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:57 crc kubenswrapper[4792]: I1202 18:57:57.568035 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97" path="/var/lib/kubelet/pods/ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97/volumes" Dec 02 18:57:57 crc kubenswrapper[4792]: I1202 18:57:57.600874 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-847c4cc679-2rdm4"] Dec 02 18:57:57 crc kubenswrapper[4792]: I1202 18:57:57.615368 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-847c4cc679-2rdm4"] Dec 02 18:57:57 crc kubenswrapper[4792]: I1202 18:57:57.924295 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"cb410397-57d4-441b-b612-0f05926cc480","Type":"ContainerStarted","Data":"826894c8fb1fb7d39a1b114b103fbc063f12c619f40e9bba842aac2d41a33f57"} Dec 02 18:57:57 crc kubenswrapper[4792]: I1202 18:57:57.927567 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b5005838-4f7e-4571-be44-5a07b3746f37","Type":"ContainerStarted","Data":"741bd0852e8968a46456b75531d78f874585087ae19c9d14636ebd80a12420b2"} Dec 02 18:57:57 crc kubenswrapper[4792]: I1202 18:57:57.927801 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="b5005838-4f7e-4571-be44-5a07b3746f37" containerName="glance-log" containerID="cri-o://77ded948a82fdaa00ab78550791908cc7ade22cf2abb53668c89c90ce3d372e1" gracePeriod=30 Dec 02 18:57:57 crc kubenswrapper[4792]: I1202 18:57:57.927866 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="b5005838-4f7e-4571-be44-5a07b3746f37" containerName="glance-httpd" containerID="cri-o://741bd0852e8968a46456b75531d78f874585087ae19c9d14636ebd80a12420b2" gracePeriod=30 Dec 02 18:57:57 crc kubenswrapper[4792]: I1202 18:57:57.958070 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.958051961 podStartE2EDuration="4.958051961s" podCreationTimestamp="2025-12-02 18:57:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:57:57.947484307 +0000 UTC m=+1308.720376635" watchObservedRunningTime="2025-12-02 18:57:57.958051961 +0000 UTC m=+1308.730944289" Dec 02 18:57:58 crc kubenswrapper[4792]: I1202 18:57:58.690170 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 02 18:57:58 crc kubenswrapper[4792]: I1202 18:57:58.862576 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5005838-4f7e-4571-be44-5a07b3746f37-config-data\") pod \"b5005838-4f7e-4571-be44-5a07b3746f37\" (UID: \"b5005838-4f7e-4571-be44-5a07b3746f37\") " Dec 02 18:57:58 crc kubenswrapper[4792]: I1202 18:57:58.862888 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b5005838-4f7e-4571-be44-5a07b3746f37-logs\") pod \"b5005838-4f7e-4571-be44-5a07b3746f37\" (UID: \"b5005838-4f7e-4571-be44-5a07b3746f37\") " Dec 02 18:57:58 crc kubenswrapper[4792]: I1202 18:57:58.862941 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b5005838-4f7e-4571-be44-5a07b3746f37-httpd-run\") pod \"b5005838-4f7e-4571-be44-5a07b3746f37\" (UID: \"b5005838-4f7e-4571-be44-5a07b3746f37\") " Dec 02 18:57:58 crc kubenswrapper[4792]: I1202 18:57:58.863000 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jw49l\" (UniqueName: \"kubernetes.io/projected/b5005838-4f7e-4571-be44-5a07b3746f37-kube-api-access-jw49l\") pod \"b5005838-4f7e-4571-be44-5a07b3746f37\" (UID: \"b5005838-4f7e-4571-be44-5a07b3746f37\") " Dec 02 18:57:58 crc kubenswrapper[4792]: I1202 18:57:58.863027 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5005838-4f7e-4571-be44-5a07b3746f37-combined-ca-bundle\") pod \"b5005838-4f7e-4571-be44-5a07b3746f37\" (UID: \"b5005838-4f7e-4571-be44-5a07b3746f37\") " Dec 02 18:57:58 crc kubenswrapper[4792]: I1202 18:57:58.863106 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7a6cbd4c-785c-4ada-876d-0f0fb655df45\") pod \"b5005838-4f7e-4571-be44-5a07b3746f37\" (UID: \"b5005838-4f7e-4571-be44-5a07b3746f37\") " Dec 02 18:57:58 crc kubenswrapper[4792]: I1202 18:57:58.863164 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b5005838-4f7e-4571-be44-5a07b3746f37-internal-tls-certs\") pod \"b5005838-4f7e-4571-be44-5a07b3746f37\" (UID: \"b5005838-4f7e-4571-be44-5a07b3746f37\") " Dec 02 18:57:58 crc kubenswrapper[4792]: I1202 18:57:58.863190 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b5005838-4f7e-4571-be44-5a07b3746f37-scripts\") pod \"b5005838-4f7e-4571-be44-5a07b3746f37\" (UID: \"b5005838-4f7e-4571-be44-5a07b3746f37\") " Dec 02 18:57:58 crc kubenswrapper[4792]: I1202 18:57:58.864428 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5005838-4f7e-4571-be44-5a07b3746f37-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "b5005838-4f7e-4571-be44-5a07b3746f37" (UID: "b5005838-4f7e-4571-be44-5a07b3746f37"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:57:58 crc kubenswrapper[4792]: I1202 18:57:58.865156 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5005838-4f7e-4571-be44-5a07b3746f37-logs" (OuterVolumeSpecName: "logs") pod "b5005838-4f7e-4571-be44-5a07b3746f37" (UID: "b5005838-4f7e-4571-be44-5a07b3746f37"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:57:58 crc kubenswrapper[4792]: I1202 18:57:58.871343 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5005838-4f7e-4571-be44-5a07b3746f37-scripts" (OuterVolumeSpecName: "scripts") pod "b5005838-4f7e-4571-be44-5a07b3746f37" (UID: "b5005838-4f7e-4571-be44-5a07b3746f37"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:57:58 crc kubenswrapper[4792]: I1202 18:57:58.890660 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b5005838-4f7e-4571-be44-5a07b3746f37-kube-api-access-jw49l" (OuterVolumeSpecName: "kube-api-access-jw49l") pod "b5005838-4f7e-4571-be44-5a07b3746f37" (UID: "b5005838-4f7e-4571-be44-5a07b3746f37"). InnerVolumeSpecName "kube-api-access-jw49l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:57:58 crc kubenswrapper[4792]: I1202 18:57:58.908463 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7a6cbd4c-785c-4ada-876d-0f0fb655df45" (OuterVolumeSpecName: "glance") pod "b5005838-4f7e-4571-be44-5a07b3746f37" (UID: "b5005838-4f7e-4571-be44-5a07b3746f37"). InnerVolumeSpecName "pvc-7a6cbd4c-785c-4ada-876d-0f0fb655df45". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 02 18:57:58 crc kubenswrapper[4792]: I1202 18:57:58.909364 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5005838-4f7e-4571-be44-5a07b3746f37-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b5005838-4f7e-4571-be44-5a07b3746f37" (UID: "b5005838-4f7e-4571-be44-5a07b3746f37"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:57:58 crc kubenswrapper[4792]: I1202 18:57:58.930542 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5005838-4f7e-4571-be44-5a07b3746f37-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "b5005838-4f7e-4571-be44-5a07b3746f37" (UID: "b5005838-4f7e-4571-be44-5a07b3746f37"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:57:58 crc kubenswrapper[4792]: I1202 18:57:58.945173 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"cb410397-57d4-441b-b612-0f05926cc480","Type":"ContainerStarted","Data":"4cc5f89a2ac81f99ed816c69d3a3c0dda7054cfbb9e59772e9da02e25c570404"} Dec 02 18:57:58 crc kubenswrapper[4792]: I1202 18:57:58.945498 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5005838-4f7e-4571-be44-5a07b3746f37-config-data" (OuterVolumeSpecName: "config-data") pod "b5005838-4f7e-4571-be44-5a07b3746f37" (UID: "b5005838-4f7e-4571-be44-5a07b3746f37"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:57:58 crc kubenswrapper[4792]: I1202 18:57:58.945539 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="cb410397-57d4-441b-b612-0f05926cc480" containerName="glance-log" containerID="cri-o://826894c8fb1fb7d39a1b114b103fbc063f12c619f40e9bba842aac2d41a33f57" gracePeriod=30 Dec 02 18:57:58 crc kubenswrapper[4792]: I1202 18:57:58.945640 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="cb410397-57d4-441b-b612-0f05926cc480" containerName="glance-httpd" containerID="cri-o://4cc5f89a2ac81f99ed816c69d3a3c0dda7054cfbb9e59772e9da02e25c570404" gracePeriod=30 Dec 02 18:57:58 crc kubenswrapper[4792]: I1202 18:57:58.950550 4792 generic.go:334] "Generic (PLEG): container finished" podID="b5005838-4f7e-4571-be44-5a07b3746f37" containerID="741bd0852e8968a46456b75531d78f874585087ae19c9d14636ebd80a12420b2" exitCode=143 Dec 02 18:57:58 crc kubenswrapper[4792]: I1202 18:57:58.950576 4792 generic.go:334] "Generic (PLEG): container finished" podID="b5005838-4f7e-4571-be44-5a07b3746f37" containerID="77ded948a82fdaa00ab78550791908cc7ade22cf2abb53668c89c90ce3d372e1" exitCode=143 Dec 02 18:57:58 crc kubenswrapper[4792]: I1202 18:57:58.950600 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b5005838-4f7e-4571-be44-5a07b3746f37","Type":"ContainerDied","Data":"741bd0852e8968a46456b75531d78f874585087ae19c9d14636ebd80a12420b2"} Dec 02 18:57:58 crc kubenswrapper[4792]: I1202 18:57:58.950624 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b5005838-4f7e-4571-be44-5a07b3746f37","Type":"ContainerDied","Data":"77ded948a82fdaa00ab78550791908cc7ade22cf2abb53668c89c90ce3d372e1"} Dec 02 18:57:58 crc kubenswrapper[4792]: I1202 18:57:58.950636 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b5005838-4f7e-4571-be44-5a07b3746f37","Type":"ContainerDied","Data":"200b50367f8dfb6b50e51fff7d0d7412b0054114929bc0bc2ca6292181ef2576"} Dec 02 18:57:58 crc kubenswrapper[4792]: I1202 18:57:58.950652 4792 scope.go:117] "RemoveContainer" containerID="741bd0852e8968a46456b75531d78f874585087ae19c9d14636ebd80a12420b2" Dec 02 18:57:58 crc kubenswrapper[4792]: I1202 18:57:58.950766 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 02 18:57:58 crc kubenswrapper[4792]: I1202 18:57:58.965996 4792 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b5005838-4f7e-4571-be44-5a07b3746f37-logs\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:58 crc kubenswrapper[4792]: I1202 18:57:58.966025 4792 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b5005838-4f7e-4571-be44-5a07b3746f37-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:58 crc kubenswrapper[4792]: I1202 18:57:58.966035 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jw49l\" (UniqueName: \"kubernetes.io/projected/b5005838-4f7e-4571-be44-5a07b3746f37-kube-api-access-jw49l\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:58 crc kubenswrapper[4792]: I1202 18:57:58.966047 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5005838-4f7e-4571-be44-5a07b3746f37-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:58 crc kubenswrapper[4792]: I1202 18:57:58.966077 4792 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-7a6cbd4c-785c-4ada-876d-0f0fb655df45\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7a6cbd4c-785c-4ada-876d-0f0fb655df45\") on node \"crc\" " Dec 02 18:57:58 crc kubenswrapper[4792]: I1202 18:57:58.966086 4792 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b5005838-4f7e-4571-be44-5a07b3746f37-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:58 crc kubenswrapper[4792]: I1202 18:57:58.966096 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b5005838-4f7e-4571-be44-5a07b3746f37-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:58 crc kubenswrapper[4792]: I1202 18:57:58.966105 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5005838-4f7e-4571-be44-5a07b3746f37-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:58 crc kubenswrapper[4792]: I1202 18:57:58.971476 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=6.971458109 podStartE2EDuration="6.971458109s" podCreationTimestamp="2025-12-02 18:57:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:57:58.960514726 +0000 UTC m=+1309.733407054" watchObservedRunningTime="2025-12-02 18:57:58.971458109 +0000 UTC m=+1309.744350437" Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.003832 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.010093 4792 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.010438 4792 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-7a6cbd4c-785c-4ada-876d-0f0fb655df45" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7a6cbd4c-785c-4ada-876d-0f0fb655df45") on node "crc" Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.013433 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.040186 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 18:57:59 crc kubenswrapper[4792]: E1202 18:57:59.040638 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5005838-4f7e-4571-be44-5a07b3746f37" containerName="glance-log" Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.040649 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5005838-4f7e-4571-be44-5a07b3746f37" containerName="glance-log" Dec 02 18:57:59 crc kubenswrapper[4792]: E1202 18:57:59.040678 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97" containerName="dnsmasq-dns" Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.040685 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97" containerName="dnsmasq-dns" Dec 02 18:57:59 crc kubenswrapper[4792]: E1202 18:57:59.040696 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d07f699-63a8-4c42-a0d9-3481700d8a20" containerName="init" Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.040702 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d07f699-63a8-4c42-a0d9-3481700d8a20" containerName="init" Dec 02 18:57:59 crc kubenswrapper[4792]: E1202 18:57:59.040715 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5005838-4f7e-4571-be44-5a07b3746f37" containerName="glance-httpd" Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.040721 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5005838-4f7e-4571-be44-5a07b3746f37" containerName="glance-httpd" Dec 02 18:57:59 crc kubenswrapper[4792]: E1202 18:57:59.040737 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97" containerName="init" Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.040744 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97" containerName="init" Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.040924 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5005838-4f7e-4571-be44-5a07b3746f37" containerName="glance-log" Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.040936 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae2a5fe7-e4fc-4e75-ada1-7dad7db02f97" containerName="dnsmasq-dns" Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.040949 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5005838-4f7e-4571-be44-5a07b3746f37" containerName="glance-httpd" Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.040956 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d07f699-63a8-4c42-a0d9-3481700d8a20" containerName="init" Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.048838 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.051795 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.051840 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.071240 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.072315 4792 reconciler_common.go:293] "Volume detached for volume \"pvc-7a6cbd4c-785c-4ada-876d-0f0fb655df45\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7a6cbd4c-785c-4ada-876d-0f0fb655df45\") on node \"crc\" DevicePath \"\"" Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.173506 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/244d824b-faf3-4749-adeb-b3c5c13004b4-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"244d824b-faf3-4749-adeb-b3c5c13004b4\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.173804 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/244d824b-faf3-4749-adeb-b3c5c13004b4-config-data\") pod \"glance-default-internal-api-0\" (UID: \"244d824b-faf3-4749-adeb-b3c5c13004b4\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.173859 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/244d824b-faf3-4749-adeb-b3c5c13004b4-logs\") pod \"glance-default-internal-api-0\" (UID: \"244d824b-faf3-4749-adeb-b3c5c13004b4\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.173902 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-7a6cbd4c-785c-4ada-876d-0f0fb655df45\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7a6cbd4c-785c-4ada-876d-0f0fb655df45\") pod \"glance-default-internal-api-0\" (UID: \"244d824b-faf3-4749-adeb-b3c5c13004b4\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.173933 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dgtfw\" (UniqueName: \"kubernetes.io/projected/244d824b-faf3-4749-adeb-b3c5c13004b4-kube-api-access-dgtfw\") pod \"glance-default-internal-api-0\" (UID: \"244d824b-faf3-4749-adeb-b3c5c13004b4\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.173952 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/244d824b-faf3-4749-adeb-b3c5c13004b4-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"244d824b-faf3-4749-adeb-b3c5c13004b4\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.173974 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/244d824b-faf3-4749-adeb-b3c5c13004b4-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"244d824b-faf3-4749-adeb-b3c5c13004b4\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.174007 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/244d824b-faf3-4749-adeb-b3c5c13004b4-scripts\") pod \"glance-default-internal-api-0\" (UID: \"244d824b-faf3-4749-adeb-b3c5c13004b4\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.276211 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dgtfw\" (UniqueName: \"kubernetes.io/projected/244d824b-faf3-4749-adeb-b3c5c13004b4-kube-api-access-dgtfw\") pod \"glance-default-internal-api-0\" (UID: \"244d824b-faf3-4749-adeb-b3c5c13004b4\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.276263 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/244d824b-faf3-4749-adeb-b3c5c13004b4-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"244d824b-faf3-4749-adeb-b3c5c13004b4\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.276292 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/244d824b-faf3-4749-adeb-b3c5c13004b4-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"244d824b-faf3-4749-adeb-b3c5c13004b4\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.276333 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/244d824b-faf3-4749-adeb-b3c5c13004b4-scripts\") pod \"glance-default-internal-api-0\" (UID: \"244d824b-faf3-4749-adeb-b3c5c13004b4\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.276392 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/244d824b-faf3-4749-adeb-b3c5c13004b4-config-data\") pod \"glance-default-internal-api-0\" (UID: \"244d824b-faf3-4749-adeb-b3c5c13004b4\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.276409 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/244d824b-faf3-4749-adeb-b3c5c13004b4-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"244d824b-faf3-4749-adeb-b3c5c13004b4\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.276461 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/244d824b-faf3-4749-adeb-b3c5c13004b4-logs\") pod \"glance-default-internal-api-0\" (UID: \"244d824b-faf3-4749-adeb-b3c5c13004b4\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.276505 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-7a6cbd4c-785c-4ada-876d-0f0fb655df45\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7a6cbd4c-785c-4ada-876d-0f0fb655df45\") pod \"glance-default-internal-api-0\" (UID: \"244d824b-faf3-4749-adeb-b3c5c13004b4\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.277253 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/244d824b-faf3-4749-adeb-b3c5c13004b4-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"244d824b-faf3-4749-adeb-b3c5c13004b4\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.277486 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/244d824b-faf3-4749-adeb-b3c5c13004b4-logs\") pod \"glance-default-internal-api-0\" (UID: \"244d824b-faf3-4749-adeb-b3c5c13004b4\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.279700 4792 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.279730 4792 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-7a6cbd4c-785c-4ada-876d-0f0fb655df45\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7a6cbd4c-785c-4ada-876d-0f0fb655df45\") pod \"glance-default-internal-api-0\" (UID: \"244d824b-faf3-4749-adeb-b3c5c13004b4\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/80e64c4b1f5f631bf26b32fa972d35244bcdcbb9d2d00ddd0ab5edaa6a730928/globalmount\"" pod="openstack/glance-default-internal-api-0" Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.282648 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/244d824b-faf3-4749-adeb-b3c5c13004b4-scripts\") pod \"glance-default-internal-api-0\" (UID: \"244d824b-faf3-4749-adeb-b3c5c13004b4\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.282928 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/244d824b-faf3-4749-adeb-b3c5c13004b4-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"244d824b-faf3-4749-adeb-b3c5c13004b4\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.284458 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/244d824b-faf3-4749-adeb-b3c5c13004b4-config-data\") pod \"glance-default-internal-api-0\" (UID: \"244d824b-faf3-4749-adeb-b3c5c13004b4\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.286905 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/244d824b-faf3-4749-adeb-b3c5c13004b4-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"244d824b-faf3-4749-adeb-b3c5c13004b4\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.294870 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dgtfw\" (UniqueName: \"kubernetes.io/projected/244d824b-faf3-4749-adeb-b3c5c13004b4-kube-api-access-dgtfw\") pod 
\"glance-default-internal-api-0\" (UID: \"244d824b-faf3-4749-adeb-b3c5c13004b4\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.322432 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-7a6cbd4c-785c-4ada-876d-0f0fb655df45\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7a6cbd4c-785c-4ada-876d-0f0fb655df45\") pod \"glance-default-internal-api-0\" (UID: \"244d824b-faf3-4749-adeb-b3c5c13004b4\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.368513 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.566365 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d07f699-63a8-4c42-a0d9-3481700d8a20" path="/var/lib/kubelet/pods/5d07f699-63a8-4c42-a0d9-3481700d8a20/volumes" Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.567396 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b5005838-4f7e-4571-be44-5a07b3746f37" path="/var/lib/kubelet/pods/b5005838-4f7e-4571-be44-5a07b3746f37/volumes" Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.978958 4792 generic.go:334] "Generic (PLEG): container finished" podID="cb410397-57d4-441b-b612-0f05926cc480" containerID="4cc5f89a2ac81f99ed816c69d3a3c0dda7054cfbb9e59772e9da02e25c570404" exitCode=0 Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.979004 4792 generic.go:334] "Generic (PLEG): container finished" podID="cb410397-57d4-441b-b612-0f05926cc480" containerID="826894c8fb1fb7d39a1b114b103fbc063f12c619f40e9bba842aac2d41a33f57" exitCode=143 Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.979186 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"cb410397-57d4-441b-b612-0f05926cc480","Type":"ContainerDied","Data":"4cc5f89a2ac81f99ed816c69d3a3c0dda7054cfbb9e59772e9da02e25c570404"} Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.979226 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"cb410397-57d4-441b-b612-0f05926cc480","Type":"ContainerDied","Data":"826894c8fb1fb7d39a1b114b103fbc063f12c619f40e9bba842aac2d41a33f57"} Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.983515 4792 generic.go:334] "Generic (PLEG): container finished" podID="0eb1ec35-0b74-4b18-9c17-63f1f13dccaa" containerID="878a6446d1a442e6e2bca3c1e5afff1787a2da6fa21d22591aa8c78d44356b7f" exitCode=0 Dec 02 18:57:59 crc kubenswrapper[4792]: I1202 18:57:59.983556 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-hxh97" event={"ID":"0eb1ec35-0b74-4b18-9c17-63f1f13dccaa","Type":"ContainerDied","Data":"878a6446d1a442e6e2bca3c1e5afff1787a2da6fa21d22591aa8c78d44356b7f"} Dec 02 18:58:01 crc kubenswrapper[4792]: E1202 18:58:01.412937 4792 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6f2d96d0_f671_46cd_8e95_162a0773470d.slice\": RecentStats: unable to find data in memory cache]" Dec 02 18:58:04 crc kubenswrapper[4792]: I1202 18:58:04.039471 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-hxh97" Dec 02 18:58:04 crc kubenswrapper[4792]: I1202 18:58:04.044169 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-hxh97" event={"ID":"0eb1ec35-0b74-4b18-9c17-63f1f13dccaa","Type":"ContainerDied","Data":"4ff1759a6625a7b333fdb8b969b00f2a621ecea333db2b9dda259838d75df17d"} Dec 02 18:58:04 crc kubenswrapper[4792]: I1202 18:58:04.044208 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4ff1759a6625a7b333fdb8b969b00f2a621ecea333db2b9dda259838d75df17d" Dec 02 18:58:04 crc kubenswrapper[4792]: I1202 18:58:04.122932 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-785d8bcb8c-pc89n" Dec 02 18:58:04 crc kubenswrapper[4792]: I1202 18:58:04.220359 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0eb1ec35-0b74-4b18-9c17-63f1f13dccaa-fernet-keys\") pod \"0eb1ec35-0b74-4b18-9c17-63f1f13dccaa\" (UID: \"0eb1ec35-0b74-4b18-9c17-63f1f13dccaa\") " Dec 02 18:58:04 crc kubenswrapper[4792]: I1202 18:58:04.220425 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0eb1ec35-0b74-4b18-9c17-63f1f13dccaa-scripts\") pod \"0eb1ec35-0b74-4b18-9c17-63f1f13dccaa\" (UID: \"0eb1ec35-0b74-4b18-9c17-63f1f13dccaa\") " Dec 02 18:58:04 crc kubenswrapper[4792]: I1202 18:58:04.220563 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h2lqx\" (UniqueName: \"kubernetes.io/projected/0eb1ec35-0b74-4b18-9c17-63f1f13dccaa-kube-api-access-h2lqx\") pod \"0eb1ec35-0b74-4b18-9c17-63f1f13dccaa\" (UID: \"0eb1ec35-0b74-4b18-9c17-63f1f13dccaa\") " Dec 02 18:58:04 crc kubenswrapper[4792]: I1202 18:58:04.220739 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0eb1ec35-0b74-4b18-9c17-63f1f13dccaa-credential-keys\") pod \"0eb1ec35-0b74-4b18-9c17-63f1f13dccaa\" (UID: \"0eb1ec35-0b74-4b18-9c17-63f1f13dccaa\") " Dec 02 18:58:04 crc kubenswrapper[4792]: I1202 18:58:04.220827 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0eb1ec35-0b74-4b18-9c17-63f1f13dccaa-config-data\") pod \"0eb1ec35-0b74-4b18-9c17-63f1f13dccaa\" (UID: \"0eb1ec35-0b74-4b18-9c17-63f1f13dccaa\") " Dec 02 18:58:04 crc kubenswrapper[4792]: I1202 18:58:04.220859 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0eb1ec35-0b74-4b18-9c17-63f1f13dccaa-combined-ca-bundle\") pod \"0eb1ec35-0b74-4b18-9c17-63f1f13dccaa\" (UID: \"0eb1ec35-0b74-4b18-9c17-63f1f13dccaa\") " Dec 02 18:58:04 crc kubenswrapper[4792]: I1202 18:58:04.228907 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0eb1ec35-0b74-4b18-9c17-63f1f13dccaa-scripts" (OuterVolumeSpecName: "scripts") pod "0eb1ec35-0b74-4b18-9c17-63f1f13dccaa" (UID: "0eb1ec35-0b74-4b18-9c17-63f1f13dccaa"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:58:04 crc kubenswrapper[4792]: I1202 18:58:04.234212 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-n56s7"] Dec 02 18:58:04 crc kubenswrapper[4792]: I1202 18:58:04.234449 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-698758b865-n56s7" podUID="cb42b8ac-ce52-46dc-8742-339d3ea9ab03" containerName="dnsmasq-dns" containerID="cri-o://560181dc27cc02423fda8f70bd5fe4924e2a1394e26186415302fa3a357de7fd" gracePeriod=10 Dec 02 18:58:04 crc kubenswrapper[4792]: I1202 18:58:04.238268 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0eb1ec35-0b74-4b18-9c17-63f1f13dccaa-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "0eb1ec35-0b74-4b18-9c17-63f1f13dccaa" (UID: "0eb1ec35-0b74-4b18-9c17-63f1f13dccaa"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:58:04 crc kubenswrapper[4792]: I1202 18:58:04.242655 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0eb1ec35-0b74-4b18-9c17-63f1f13dccaa-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "0eb1ec35-0b74-4b18-9c17-63f1f13dccaa" (UID: "0eb1ec35-0b74-4b18-9c17-63f1f13dccaa"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:58:04 crc kubenswrapper[4792]: I1202 18:58:04.259149 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0eb1ec35-0b74-4b18-9c17-63f1f13dccaa-kube-api-access-h2lqx" (OuterVolumeSpecName: "kube-api-access-h2lqx") pod "0eb1ec35-0b74-4b18-9c17-63f1f13dccaa" (UID: "0eb1ec35-0b74-4b18-9c17-63f1f13dccaa"). InnerVolumeSpecName "kube-api-access-h2lqx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:58:04 crc kubenswrapper[4792]: I1202 18:58:04.264770 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0eb1ec35-0b74-4b18-9c17-63f1f13dccaa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0eb1ec35-0b74-4b18-9c17-63f1f13dccaa" (UID: "0eb1ec35-0b74-4b18-9c17-63f1f13dccaa"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:58:04 crc kubenswrapper[4792]: I1202 18:58:04.271490 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0eb1ec35-0b74-4b18-9c17-63f1f13dccaa-config-data" (OuterVolumeSpecName: "config-data") pod "0eb1ec35-0b74-4b18-9c17-63f1f13dccaa" (UID: "0eb1ec35-0b74-4b18-9c17-63f1f13dccaa"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:58:04 crc kubenswrapper[4792]: I1202 18:58:04.322743 4792 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0eb1ec35-0b74-4b18-9c17-63f1f13dccaa-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 02 18:58:04 crc kubenswrapper[4792]: I1202 18:58:04.322993 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0eb1ec35-0b74-4b18-9c17-63f1f13dccaa-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 18:58:04 crc kubenswrapper[4792]: I1202 18:58:04.323002 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h2lqx\" (UniqueName: \"kubernetes.io/projected/0eb1ec35-0b74-4b18-9c17-63f1f13dccaa-kube-api-access-h2lqx\") on node \"crc\" DevicePath \"\"" Dec 02 18:58:04 crc kubenswrapper[4792]: I1202 18:58:04.323014 4792 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0eb1ec35-0b74-4b18-9c17-63f1f13dccaa-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 02 18:58:04 crc kubenswrapper[4792]: I1202 18:58:04.323023 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0eb1ec35-0b74-4b18-9c17-63f1f13dccaa-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 18:58:04 crc kubenswrapper[4792]: I1202 18:58:04.323031 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0eb1ec35-0b74-4b18-9c17-63f1f13dccaa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 18:58:05 crc kubenswrapper[4792]: I1202 18:58:05.056644 4792 generic.go:334] "Generic (PLEG): container finished" podID="cb42b8ac-ce52-46dc-8742-339d3ea9ab03" containerID="560181dc27cc02423fda8f70bd5fe4924e2a1394e26186415302fa3a357de7fd" exitCode=0 Dec 02 18:58:05 crc kubenswrapper[4792]: I1202 18:58:05.056776 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-hxh97" Dec 02 18:58:05 crc kubenswrapper[4792]: I1202 18:58:05.056769 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-n56s7" event={"ID":"cb42b8ac-ce52-46dc-8742-339d3ea9ab03","Type":"ContainerDied","Data":"560181dc27cc02423fda8f70bd5fe4924e2a1394e26186415302fa3a357de7fd"} Dec 02 18:58:05 crc kubenswrapper[4792]: I1202 18:58:05.150270 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-hxh97"] Dec 02 18:58:05 crc kubenswrapper[4792]: I1202 18:58:05.161371 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-hxh97"] Dec 02 18:58:05 crc kubenswrapper[4792]: I1202 18:58:05.240766 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-455zq"] Dec 02 18:58:05 crc kubenswrapper[4792]: E1202 18:58:05.241333 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0eb1ec35-0b74-4b18-9c17-63f1f13dccaa" containerName="keystone-bootstrap" Dec 02 18:58:05 crc kubenswrapper[4792]: I1202 18:58:05.241362 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="0eb1ec35-0b74-4b18-9c17-63f1f13dccaa" containerName="keystone-bootstrap" Dec 02 18:58:05 crc kubenswrapper[4792]: I1202 18:58:05.241771 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="0eb1ec35-0b74-4b18-9c17-63f1f13dccaa" containerName="keystone-bootstrap" Dec 02 18:58:05 crc kubenswrapper[4792]: I1202 18:58:05.242895 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-455zq" Dec 02 18:58:05 crc kubenswrapper[4792]: I1202 18:58:05.245286 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-phdxk" Dec 02 18:58:05 crc kubenswrapper[4792]: I1202 18:58:05.245422 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 02 18:58:05 crc kubenswrapper[4792]: I1202 18:58:05.245598 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 02 18:58:05 crc kubenswrapper[4792]: I1202 18:58:05.245622 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 02 18:58:05 crc kubenswrapper[4792]: I1202 18:58:05.261145 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-455zq"] Dec 02 18:58:05 crc kubenswrapper[4792]: I1202 18:58:05.362233 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/df41e02b-ae80-4435-bcd8-df8b9549e73d-credential-keys\") pod \"keystone-bootstrap-455zq\" (UID: \"df41e02b-ae80-4435-bcd8-df8b9549e73d\") " pod="openstack/keystone-bootstrap-455zq" Dec 02 18:58:05 crc kubenswrapper[4792]: I1202 18:58:05.362304 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8cwvs\" (UniqueName: \"kubernetes.io/projected/df41e02b-ae80-4435-bcd8-df8b9549e73d-kube-api-access-8cwvs\") pod \"keystone-bootstrap-455zq\" (UID: \"df41e02b-ae80-4435-bcd8-df8b9549e73d\") " pod="openstack/keystone-bootstrap-455zq" Dec 02 18:58:05 crc kubenswrapper[4792]: I1202 18:58:05.362416 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df41e02b-ae80-4435-bcd8-df8b9549e73d-config-data\") pod 
\"keystone-bootstrap-455zq\" (UID: \"df41e02b-ae80-4435-bcd8-df8b9549e73d\") " pod="openstack/keystone-bootstrap-455zq" Dec 02 18:58:05 crc kubenswrapper[4792]: I1202 18:58:05.362493 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df41e02b-ae80-4435-bcd8-df8b9549e73d-scripts\") pod \"keystone-bootstrap-455zq\" (UID: \"df41e02b-ae80-4435-bcd8-df8b9549e73d\") " pod="openstack/keystone-bootstrap-455zq" Dec 02 18:58:05 crc kubenswrapper[4792]: I1202 18:58:05.362594 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df41e02b-ae80-4435-bcd8-df8b9549e73d-combined-ca-bundle\") pod \"keystone-bootstrap-455zq\" (UID: \"df41e02b-ae80-4435-bcd8-df8b9549e73d\") " pod="openstack/keystone-bootstrap-455zq" Dec 02 18:58:05 crc kubenswrapper[4792]: I1202 18:58:05.362721 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/df41e02b-ae80-4435-bcd8-df8b9549e73d-fernet-keys\") pod \"keystone-bootstrap-455zq\" (UID: \"df41e02b-ae80-4435-bcd8-df8b9549e73d\") " pod="openstack/keystone-bootstrap-455zq" Dec 02 18:58:05 crc kubenswrapper[4792]: I1202 18:58:05.461075 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-n56s7" podUID="cb42b8ac-ce52-46dc-8742-339d3ea9ab03" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.130:5353: connect: connection refused" Dec 02 18:58:05 crc kubenswrapper[4792]: I1202 18:58:05.464945 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/df41e02b-ae80-4435-bcd8-df8b9549e73d-credential-keys\") pod \"keystone-bootstrap-455zq\" (UID: \"df41e02b-ae80-4435-bcd8-df8b9549e73d\") " pod="openstack/keystone-bootstrap-455zq" Dec 02 18:58:05 crc kubenswrapper[4792]: I1202 18:58:05.465016 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8cwvs\" (UniqueName: \"kubernetes.io/projected/df41e02b-ae80-4435-bcd8-df8b9549e73d-kube-api-access-8cwvs\") pod \"keystone-bootstrap-455zq\" (UID: \"df41e02b-ae80-4435-bcd8-df8b9549e73d\") " pod="openstack/keystone-bootstrap-455zq" Dec 02 18:58:05 crc kubenswrapper[4792]: I1202 18:58:05.465096 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df41e02b-ae80-4435-bcd8-df8b9549e73d-config-data\") pod \"keystone-bootstrap-455zq\" (UID: \"df41e02b-ae80-4435-bcd8-df8b9549e73d\") " pod="openstack/keystone-bootstrap-455zq" Dec 02 18:58:05 crc kubenswrapper[4792]: I1202 18:58:05.465191 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df41e02b-ae80-4435-bcd8-df8b9549e73d-scripts\") pod \"keystone-bootstrap-455zq\" (UID: \"df41e02b-ae80-4435-bcd8-df8b9549e73d\") " pod="openstack/keystone-bootstrap-455zq" Dec 02 18:58:05 crc kubenswrapper[4792]: I1202 18:58:05.465250 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df41e02b-ae80-4435-bcd8-df8b9549e73d-combined-ca-bundle\") pod \"keystone-bootstrap-455zq\" (UID: \"df41e02b-ae80-4435-bcd8-df8b9549e73d\") " pod="openstack/keystone-bootstrap-455zq" Dec 02 18:58:05 crc 
kubenswrapper[4792]: I1202 18:58:05.465369 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/df41e02b-ae80-4435-bcd8-df8b9549e73d-fernet-keys\") pod \"keystone-bootstrap-455zq\" (UID: \"df41e02b-ae80-4435-bcd8-df8b9549e73d\") " pod="openstack/keystone-bootstrap-455zq" Dec 02 18:58:05 crc kubenswrapper[4792]: I1202 18:58:05.471597 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/df41e02b-ae80-4435-bcd8-df8b9549e73d-fernet-keys\") pod \"keystone-bootstrap-455zq\" (UID: \"df41e02b-ae80-4435-bcd8-df8b9549e73d\") " pod="openstack/keystone-bootstrap-455zq" Dec 02 18:58:05 crc kubenswrapper[4792]: I1202 18:58:05.472471 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df41e02b-ae80-4435-bcd8-df8b9549e73d-config-data\") pod \"keystone-bootstrap-455zq\" (UID: \"df41e02b-ae80-4435-bcd8-df8b9549e73d\") " pod="openstack/keystone-bootstrap-455zq" Dec 02 18:58:05 crc kubenswrapper[4792]: I1202 18:58:05.480150 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df41e02b-ae80-4435-bcd8-df8b9549e73d-scripts\") pod \"keystone-bootstrap-455zq\" (UID: \"df41e02b-ae80-4435-bcd8-df8b9549e73d\") " pod="openstack/keystone-bootstrap-455zq" Dec 02 18:58:05 crc kubenswrapper[4792]: I1202 18:58:05.480592 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/df41e02b-ae80-4435-bcd8-df8b9549e73d-credential-keys\") pod \"keystone-bootstrap-455zq\" (UID: \"df41e02b-ae80-4435-bcd8-df8b9549e73d\") " pod="openstack/keystone-bootstrap-455zq" Dec 02 18:58:05 crc kubenswrapper[4792]: I1202 18:58:05.481151 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df41e02b-ae80-4435-bcd8-df8b9549e73d-combined-ca-bundle\") pod \"keystone-bootstrap-455zq\" (UID: \"df41e02b-ae80-4435-bcd8-df8b9549e73d\") " pod="openstack/keystone-bootstrap-455zq" Dec 02 18:58:05 crc kubenswrapper[4792]: I1202 18:58:05.482924 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8cwvs\" (UniqueName: \"kubernetes.io/projected/df41e02b-ae80-4435-bcd8-df8b9549e73d-kube-api-access-8cwvs\") pod \"keystone-bootstrap-455zq\" (UID: \"df41e02b-ae80-4435-bcd8-df8b9549e73d\") " pod="openstack/keystone-bootstrap-455zq" Dec 02 18:58:05 crc kubenswrapper[4792]: I1202 18:58:05.552244 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0eb1ec35-0b74-4b18-9c17-63f1f13dccaa" path="/var/lib/kubelet/pods/0eb1ec35-0b74-4b18-9c17-63f1f13dccaa/volumes" Dec 02 18:58:05 crc kubenswrapper[4792]: I1202 18:58:05.561924 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-455zq" Dec 02 18:58:08 crc kubenswrapper[4792]: I1202 18:58:08.082033 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 18:58:08 crc kubenswrapper[4792]: I1202 18:58:08.082406 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 18:58:10 crc kubenswrapper[4792]: I1202 18:58:10.460907 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-n56s7" podUID="cb42b8ac-ce52-46dc-8742-339d3ea9ab03" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.130:5353: connect: connection refused" Dec 02 18:58:15 crc kubenswrapper[4792]: I1202 18:58:15.164974 4792 generic.go:334] "Generic (PLEG): container finished" podID="13ec79b9-9006-473e-a8c9-e0cc9069d983" containerID="2345285abe3985cfea4ef46be28ca339b0996bf16b15db21a8d1c12e43c278b8" exitCode=0 Dec 02 18:58:15 crc kubenswrapper[4792]: I1202 18:58:15.165059 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-zmnrf" event={"ID":"13ec79b9-9006-473e-a8c9-e0cc9069d983","Type":"ContainerDied","Data":"2345285abe3985cfea4ef46be28ca339b0996bf16b15db21a8d1c12e43c278b8"} Dec 02 18:58:20 crc kubenswrapper[4792]: I1202 18:58:20.461028 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-n56s7" podUID="cb42b8ac-ce52-46dc-8742-339d3ea9ab03" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.130:5353: i/o timeout" Dec 02 18:58:20 crc kubenswrapper[4792]: I1202 18:58:20.461910 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-698758b865-n56s7" Dec 02 18:58:22 crc kubenswrapper[4792]: I1202 18:58:22.997907 4792 scope.go:117] "RemoveContainer" containerID="77ded948a82fdaa00ab78550791908cc7ade22cf2abb53668c89c90ce3d372e1" Dec 02 18:58:23 crc kubenswrapper[4792]: E1202 18:58:23.511009 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified" Dec 02 18:58:23 crc kubenswrapper[4792]: E1202 18:58:23.511172 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,Command:[/bin/bash],Args:[-c barbican-manage db 
upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dlhvk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-dc5j7_openstack(477f2280-d198-47f0-8f7a-cd76106d9f35): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 18:58:23 crc kubenswrapper[4792]: E1202 18:58:23.512461 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-dc5j7" podUID="477f2280-d198-47f0-8f7a-cd76106d9f35" Dec 02 18:58:23 crc kubenswrapper[4792]: I1202 18:58:23.632912 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 02 18:58:23 crc kubenswrapper[4792]: I1202 18:58:23.744771 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb410397-57d4-441b-b612-0f05926cc480-public-tls-certs\") pod \"cb410397-57d4-441b-b612-0f05926cc480\" (UID: \"cb410397-57d4-441b-b612-0f05926cc480\") " Dec 02 18:58:23 crc kubenswrapper[4792]: I1202 18:58:23.744928 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8b2507f2-3d4c-435e-95ab-efbbd47db9d7\") pod \"cb410397-57d4-441b-b612-0f05926cc480\" (UID: \"cb410397-57d4-441b-b612-0f05926cc480\") " Dec 02 18:58:23 crc kubenswrapper[4792]: I1202 18:58:23.745008 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cb410397-57d4-441b-b612-0f05926cc480-httpd-run\") pod \"cb410397-57d4-441b-b612-0f05926cc480\" (UID: \"cb410397-57d4-441b-b612-0f05926cc480\") " Dec 02 18:58:23 crc kubenswrapper[4792]: I1202 18:58:23.745035 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wp6jx\" (UniqueName: \"kubernetes.io/projected/cb410397-57d4-441b-b612-0f05926cc480-kube-api-access-wp6jx\") pod \"cb410397-57d4-441b-b612-0f05926cc480\" (UID: \"cb410397-57d4-441b-b612-0f05926cc480\") " Dec 02 18:58:23 crc kubenswrapper[4792]: I1202 18:58:23.745119 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb410397-57d4-441b-b612-0f05926cc480-combined-ca-bundle\") pod \"cb410397-57d4-441b-b612-0f05926cc480\" (UID: \"cb410397-57d4-441b-b612-0f05926cc480\") " Dec 02 18:58:23 crc kubenswrapper[4792]: I1202 18:58:23.745152 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb410397-57d4-441b-b612-0f05926cc480-config-data\") pod \"cb410397-57d4-441b-b612-0f05926cc480\" (UID: \"cb410397-57d4-441b-b612-0f05926cc480\") " Dec 02 18:58:23 crc kubenswrapper[4792]: I1202 18:58:23.745267 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cb410397-57d4-441b-b612-0f05926cc480-logs\") pod \"cb410397-57d4-441b-b612-0f05926cc480\" (UID: \"cb410397-57d4-441b-b612-0f05926cc480\") " Dec 02 18:58:23 crc kubenswrapper[4792]: I1202 18:58:23.745350 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb410397-57d4-441b-b612-0f05926cc480-scripts\") pod \"cb410397-57d4-441b-b612-0f05926cc480\" (UID: \"cb410397-57d4-441b-b612-0f05926cc480\") " Dec 02 18:58:23 crc kubenswrapper[4792]: I1202 18:58:23.746690 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb410397-57d4-441b-b612-0f05926cc480-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "cb410397-57d4-441b-b612-0f05926cc480" (UID: "cb410397-57d4-441b-b612-0f05926cc480"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:58:23 crc kubenswrapper[4792]: I1202 18:58:23.747100 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb410397-57d4-441b-b612-0f05926cc480-logs" (OuterVolumeSpecName: "logs") pod "cb410397-57d4-441b-b612-0f05926cc480" (UID: "cb410397-57d4-441b-b612-0f05926cc480"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:58:23 crc kubenswrapper[4792]: I1202 18:58:23.755166 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb410397-57d4-441b-b612-0f05926cc480-kube-api-access-wp6jx" (OuterVolumeSpecName: "kube-api-access-wp6jx") pod "cb410397-57d4-441b-b612-0f05926cc480" (UID: "cb410397-57d4-441b-b612-0f05926cc480"). InnerVolumeSpecName "kube-api-access-wp6jx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:58:23 crc kubenswrapper[4792]: I1202 18:58:23.773420 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8b2507f2-3d4c-435e-95ab-efbbd47db9d7" (OuterVolumeSpecName: "glance") pod "cb410397-57d4-441b-b612-0f05926cc480" (UID: "cb410397-57d4-441b-b612-0f05926cc480"). InnerVolumeSpecName "pvc-8b2507f2-3d4c-435e-95ab-efbbd47db9d7". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 02 18:58:23 crc kubenswrapper[4792]: I1202 18:58:23.776464 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb410397-57d4-441b-b612-0f05926cc480-scripts" (OuterVolumeSpecName: "scripts") pod "cb410397-57d4-441b-b612-0f05926cc480" (UID: "cb410397-57d4-441b-b612-0f05926cc480"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:58:23 crc kubenswrapper[4792]: I1202 18:58:23.806431 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb410397-57d4-441b-b612-0f05926cc480-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cb410397-57d4-441b-b612-0f05926cc480" (UID: "cb410397-57d4-441b-b612-0f05926cc480"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:58:23 crc kubenswrapper[4792]: I1202 18:58:23.839700 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb410397-57d4-441b-b612-0f05926cc480-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "cb410397-57d4-441b-b612-0f05926cc480" (UID: "cb410397-57d4-441b-b612-0f05926cc480"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:58:23 crc kubenswrapper[4792]: I1202 18:58:23.847283 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb410397-57d4-441b-b612-0f05926cc480-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 18:58:23 crc kubenswrapper[4792]: I1202 18:58:23.847312 4792 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cb410397-57d4-441b-b612-0f05926cc480-logs\") on node \"crc\" DevicePath \"\"" Dec 02 18:58:23 crc kubenswrapper[4792]: I1202 18:58:23.847324 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb410397-57d4-441b-b612-0f05926cc480-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 18:58:23 crc kubenswrapper[4792]: I1202 18:58:23.847335 4792 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb410397-57d4-441b-b612-0f05926cc480-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 18:58:23 crc kubenswrapper[4792]: I1202 18:58:23.847362 4792 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-8b2507f2-3d4c-435e-95ab-efbbd47db9d7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8b2507f2-3d4c-435e-95ab-efbbd47db9d7\") on node \"crc\" " Dec 02 18:58:23 crc kubenswrapper[4792]: I1202 18:58:23.847377 4792 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cb410397-57d4-441b-b612-0f05926cc480-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 02 18:58:23 crc kubenswrapper[4792]: I1202 18:58:23.847391 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wp6jx\" (UniqueName: \"kubernetes.io/projected/cb410397-57d4-441b-b612-0f05926cc480-kube-api-access-wp6jx\") on node \"crc\" DevicePath \"\"" Dec 02 18:58:23 crc kubenswrapper[4792]: I1202 18:58:23.851269 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb410397-57d4-441b-b612-0f05926cc480-config-data" (OuterVolumeSpecName: "config-data") pod "cb410397-57d4-441b-b612-0f05926cc480" (UID: "cb410397-57d4-441b-b612-0f05926cc480"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:58:23 crc kubenswrapper[4792]: I1202 18:58:23.883982 4792 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Dec 02 18:58:23 crc kubenswrapper[4792]: I1202 18:58:23.884142 4792 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-8b2507f2-3d4c-435e-95ab-efbbd47db9d7" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8b2507f2-3d4c-435e-95ab-efbbd47db9d7") on node "crc" Dec 02 18:58:23 crc kubenswrapper[4792]: I1202 18:58:23.954974 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb410397-57d4-441b-b612-0f05926cc480-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 18:58:23 crc kubenswrapper[4792]: I1202 18:58:23.955019 4792 reconciler_common.go:293] "Volume detached for volume \"pvc-8b2507f2-3d4c-435e-95ab-efbbd47db9d7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8b2507f2-3d4c-435e-95ab-efbbd47db9d7\") on node \"crc\" DevicePath \"\"" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.087385 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-n56s7" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.097941 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-zmnrf" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.175832 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wtpjh\" (UniqueName: \"kubernetes.io/projected/cb42b8ac-ce52-46dc-8742-339d3ea9ab03-kube-api-access-wtpjh\") pod \"cb42b8ac-ce52-46dc-8742-339d3ea9ab03\" (UID: \"cb42b8ac-ce52-46dc-8742-339d3ea9ab03\") " Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.175925 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cb42b8ac-ce52-46dc-8742-339d3ea9ab03-ovsdbserver-sb\") pod \"cb42b8ac-ce52-46dc-8742-339d3ea9ab03\" (UID: \"cb42b8ac-ce52-46dc-8742-339d3ea9ab03\") " Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.176023 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cb42b8ac-ce52-46dc-8742-339d3ea9ab03-ovsdbserver-nb\") pod \"cb42b8ac-ce52-46dc-8742-339d3ea9ab03\" (UID: \"cb42b8ac-ce52-46dc-8742-339d3ea9ab03\") " Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.176085 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/13ec79b9-9006-473e-a8c9-e0cc9069d983-config\") pod \"13ec79b9-9006-473e-a8c9-e0cc9069d983\" (UID: \"13ec79b9-9006-473e-a8c9-e0cc9069d983\") " Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.176114 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hks82\" (UniqueName: \"kubernetes.io/projected/13ec79b9-9006-473e-a8c9-e0cc9069d983-kube-api-access-hks82\") pod \"13ec79b9-9006-473e-a8c9-e0cc9069d983\" (UID: \"13ec79b9-9006-473e-a8c9-e0cc9069d983\") " Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.176170 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb42b8ac-ce52-46dc-8742-339d3ea9ab03-config\") pod \"cb42b8ac-ce52-46dc-8742-339d3ea9ab03\" (UID: \"cb42b8ac-ce52-46dc-8742-339d3ea9ab03\") " Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.176204 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/13ec79b9-9006-473e-a8c9-e0cc9069d983-combined-ca-bundle\") pod \"13ec79b9-9006-473e-a8c9-e0cc9069d983\" (UID: \"13ec79b9-9006-473e-a8c9-e0cc9069d983\") " Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.176293 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cb42b8ac-ce52-46dc-8742-339d3ea9ab03-dns-svc\") pod \"cb42b8ac-ce52-46dc-8742-339d3ea9ab03\" (UID: \"cb42b8ac-ce52-46dc-8742-339d3ea9ab03\") " Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.186422 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13ec79b9-9006-473e-a8c9-e0cc9069d983-kube-api-access-hks82" (OuterVolumeSpecName: "kube-api-access-hks82") pod "13ec79b9-9006-473e-a8c9-e0cc9069d983" (UID: "13ec79b9-9006-473e-a8c9-e0cc9069d983"). InnerVolumeSpecName "kube-api-access-hks82". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.190110 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb42b8ac-ce52-46dc-8742-339d3ea9ab03-kube-api-access-wtpjh" (OuterVolumeSpecName: "kube-api-access-wtpjh") pod "cb42b8ac-ce52-46dc-8742-339d3ea9ab03" (UID: "cb42b8ac-ce52-46dc-8742-339d3ea9ab03"). InnerVolumeSpecName "kube-api-access-wtpjh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.251007 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13ec79b9-9006-473e-a8c9-e0cc9069d983-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "13ec79b9-9006-473e-a8c9-e0cc9069d983" (UID: "13ec79b9-9006-473e-a8c9-e0cc9069d983"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.252374 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cb42b8ac-ce52-46dc-8742-339d3ea9ab03-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "cb42b8ac-ce52-46dc-8742-339d3ea9ab03" (UID: "cb42b8ac-ce52-46dc-8742-339d3ea9ab03"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.254054 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13ec79b9-9006-473e-a8c9-e0cc9069d983-config" (OuterVolumeSpecName: "config") pod "13ec79b9-9006-473e-a8c9-e0cc9069d983" (UID: "13ec79b9-9006-473e-a8c9-e0cc9069d983"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.256318 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cb42b8ac-ce52-46dc-8742-339d3ea9ab03-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "cb42b8ac-ce52-46dc-8742-339d3ea9ab03" (UID: "cb42b8ac-ce52-46dc-8742-339d3ea9ab03"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.263593 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cb42b8ac-ce52-46dc-8742-339d3ea9ab03-config" (OuterVolumeSpecName: "config") pod "cb42b8ac-ce52-46dc-8742-339d3ea9ab03" (UID: "cb42b8ac-ce52-46dc-8742-339d3ea9ab03"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.270855 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cb42b8ac-ce52-46dc-8742-339d3ea9ab03-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "cb42b8ac-ce52-46dc-8742-339d3ea9ab03" (UID: "cb42b8ac-ce52-46dc-8742-339d3ea9ab03"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.278531 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb42b8ac-ce52-46dc-8742-339d3ea9ab03-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.278561 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13ec79b9-9006-473e-a8c9-e0cc9069d983-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.278579 4792 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cb42b8ac-ce52-46dc-8742-339d3ea9ab03-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.278591 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wtpjh\" (UniqueName: \"kubernetes.io/projected/cb42b8ac-ce52-46dc-8742-339d3ea9ab03-kube-api-access-wtpjh\") on node \"crc\" DevicePath \"\"" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.278604 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cb42b8ac-ce52-46dc-8742-339d3ea9ab03-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.278616 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cb42b8ac-ce52-46dc-8742-339d3ea9ab03-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.278627 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/13ec79b9-9006-473e-a8c9-e0cc9069d983-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.278640 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hks82\" (UniqueName: \"kubernetes.io/projected/13ec79b9-9006-473e-a8c9-e0cc9069d983-kube-api-access-hks82\") on node \"crc\" DevicePath \"\"" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.307852 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.307864 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"cb410397-57d4-441b-b612-0f05926cc480","Type":"ContainerDied","Data":"a63e000560e698cd5693c782757aa62d0c61c26addcfa725df70e3f89b6c8e39"} Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.311476 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-zmnrf" event={"ID":"13ec79b9-9006-473e-a8c9-e0cc9069d983","Type":"ContainerDied","Data":"5a6ca6eba2140733712ebd357afde1be644fb48b4049983310adb389737be8a3"} Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.311507 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5a6ca6eba2140733712ebd357afde1be644fb48b4049983310adb389737be8a3" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.311579 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-zmnrf" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.315988 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-n56s7" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.316683 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-n56s7" event={"ID":"cb42b8ac-ce52-46dc-8742-339d3ea9ab03","Type":"ContainerDied","Data":"5d6adc9c3e025f40d3b6d39542b5cc6f72a6d06d6d006542f0bc47e36752c9f2"} Dec 02 18:58:24 crc kubenswrapper[4792]: E1202 18:58:24.317994 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified\\\"\"" pod="openstack/barbican-db-sync-dc5j7" podUID="477f2280-d198-47f0-8f7a-cd76106d9f35" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.354981 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.366246 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.384377 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-n56s7"] Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.392624 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-698758b865-n56s7"] Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.399480 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 18:58:24 crc kubenswrapper[4792]: E1202 18:58:24.400040 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb410397-57d4-441b-b612-0f05926cc480" containerName="glance-httpd" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.400064 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb410397-57d4-441b-b612-0f05926cc480" containerName="glance-httpd" Dec 02 18:58:24 crc kubenswrapper[4792]: E1202 18:58:24.400080 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13ec79b9-9006-473e-a8c9-e0cc9069d983" containerName="neutron-db-sync" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.400089 4792 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="13ec79b9-9006-473e-a8c9-e0cc9069d983" containerName="neutron-db-sync" Dec 02 18:58:24 crc kubenswrapper[4792]: E1202 18:58:24.400110 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb42b8ac-ce52-46dc-8742-339d3ea9ab03" containerName="dnsmasq-dns" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.400119 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb42b8ac-ce52-46dc-8742-339d3ea9ab03" containerName="dnsmasq-dns" Dec 02 18:58:24 crc kubenswrapper[4792]: E1202 18:58:24.400145 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb42b8ac-ce52-46dc-8742-339d3ea9ab03" containerName="init" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.400155 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb42b8ac-ce52-46dc-8742-339d3ea9ab03" containerName="init" Dec 02 18:58:24 crc kubenswrapper[4792]: E1202 18:58:24.400170 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb410397-57d4-441b-b612-0f05926cc480" containerName="glance-log" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.400180 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb410397-57d4-441b-b612-0f05926cc480" containerName="glance-log" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.400395 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb410397-57d4-441b-b612-0f05926cc480" containerName="glance-log" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.400425 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb410397-57d4-441b-b612-0f05926cc480" containerName="glance-httpd" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.400444 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="13ec79b9-9006-473e-a8c9-e0cc9069d983" containerName="neutron-db-sync" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.400462 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb42b8ac-ce52-46dc-8742-339d3ea9ab03" containerName="dnsmasq-dns" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.401853 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.406091 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.408513 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.408674 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.483189 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/093444e9-8e53-44ae-bbd3-efcb0c374905-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"093444e9-8e53-44ae-bbd3-efcb0c374905\") " pod="openstack/glance-default-external-api-0" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.483295 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-8b2507f2-3d4c-435e-95ab-efbbd47db9d7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8b2507f2-3d4c-435e-95ab-efbbd47db9d7\") pod \"glance-default-external-api-0\" (UID: \"093444e9-8e53-44ae-bbd3-efcb0c374905\") " pod="openstack/glance-default-external-api-0" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.483324 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/093444e9-8e53-44ae-bbd3-efcb0c374905-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"093444e9-8e53-44ae-bbd3-efcb0c374905\") " pod="openstack/glance-default-external-api-0" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.483414 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/093444e9-8e53-44ae-bbd3-efcb0c374905-logs\") pod \"glance-default-external-api-0\" (UID: \"093444e9-8e53-44ae-bbd3-efcb0c374905\") " pod="openstack/glance-default-external-api-0" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.483448 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/093444e9-8e53-44ae-bbd3-efcb0c374905-config-data\") pod \"glance-default-external-api-0\" (UID: \"093444e9-8e53-44ae-bbd3-efcb0c374905\") " pod="openstack/glance-default-external-api-0" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.483470 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/093444e9-8e53-44ae-bbd3-efcb0c374905-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"093444e9-8e53-44ae-bbd3-efcb0c374905\") " pod="openstack/glance-default-external-api-0" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.483497 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5thf5\" (UniqueName: \"kubernetes.io/projected/093444e9-8e53-44ae-bbd3-efcb0c374905-kube-api-access-5thf5\") pod \"glance-default-external-api-0\" (UID: \"093444e9-8e53-44ae-bbd3-efcb0c374905\") " pod="openstack/glance-default-external-api-0" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.483570 4792 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/093444e9-8e53-44ae-bbd3-efcb0c374905-scripts\") pod \"glance-default-external-api-0\" (UID: \"093444e9-8e53-44ae-bbd3-efcb0c374905\") " pod="openstack/glance-default-external-api-0" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.584965 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/093444e9-8e53-44ae-bbd3-efcb0c374905-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"093444e9-8e53-44ae-bbd3-efcb0c374905\") " pod="openstack/glance-default-external-api-0" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.585008 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-8b2507f2-3d4c-435e-95ab-efbbd47db9d7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8b2507f2-3d4c-435e-95ab-efbbd47db9d7\") pod \"glance-default-external-api-0\" (UID: \"093444e9-8e53-44ae-bbd3-efcb0c374905\") " pod="openstack/glance-default-external-api-0" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.585047 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/093444e9-8e53-44ae-bbd3-efcb0c374905-logs\") pod \"glance-default-external-api-0\" (UID: \"093444e9-8e53-44ae-bbd3-efcb0c374905\") " pod="openstack/glance-default-external-api-0" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.585073 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/093444e9-8e53-44ae-bbd3-efcb0c374905-config-data\") pod \"glance-default-external-api-0\" (UID: \"093444e9-8e53-44ae-bbd3-efcb0c374905\") " pod="openstack/glance-default-external-api-0" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.585087 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/093444e9-8e53-44ae-bbd3-efcb0c374905-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"093444e9-8e53-44ae-bbd3-efcb0c374905\") " pod="openstack/glance-default-external-api-0" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.585108 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5thf5\" (UniqueName: \"kubernetes.io/projected/093444e9-8e53-44ae-bbd3-efcb0c374905-kube-api-access-5thf5\") pod \"glance-default-external-api-0\" (UID: \"093444e9-8e53-44ae-bbd3-efcb0c374905\") " pod="openstack/glance-default-external-api-0" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.585131 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/093444e9-8e53-44ae-bbd3-efcb0c374905-scripts\") pod \"glance-default-external-api-0\" (UID: \"093444e9-8e53-44ae-bbd3-efcb0c374905\") " pod="openstack/glance-default-external-api-0" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.585228 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/093444e9-8e53-44ae-bbd3-efcb0c374905-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"093444e9-8e53-44ae-bbd3-efcb0c374905\") " pod="openstack/glance-default-external-api-0" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.586444 4792 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/093444e9-8e53-44ae-bbd3-efcb0c374905-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"093444e9-8e53-44ae-bbd3-efcb0c374905\") " pod="openstack/glance-default-external-api-0" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.586621 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/093444e9-8e53-44ae-bbd3-efcb0c374905-logs\") pod \"glance-default-external-api-0\" (UID: \"093444e9-8e53-44ae-bbd3-efcb0c374905\") " pod="openstack/glance-default-external-api-0" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.590596 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/093444e9-8e53-44ae-bbd3-efcb0c374905-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"093444e9-8e53-44ae-bbd3-efcb0c374905\") " pod="openstack/glance-default-external-api-0" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.591052 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/093444e9-8e53-44ae-bbd3-efcb0c374905-scripts\") pod \"glance-default-external-api-0\" (UID: \"093444e9-8e53-44ae-bbd3-efcb0c374905\") " pod="openstack/glance-default-external-api-0" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.591294 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/093444e9-8e53-44ae-bbd3-efcb0c374905-config-data\") pod \"glance-default-external-api-0\" (UID: \"093444e9-8e53-44ae-bbd3-efcb0c374905\") " pod="openstack/glance-default-external-api-0" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.595883 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/093444e9-8e53-44ae-bbd3-efcb0c374905-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"093444e9-8e53-44ae-bbd3-efcb0c374905\") " pod="openstack/glance-default-external-api-0" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.600413 4792 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.600443 4792 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-8b2507f2-3d4c-435e-95ab-efbbd47db9d7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8b2507f2-3d4c-435e-95ab-efbbd47db9d7\") pod \"glance-default-external-api-0\" (UID: \"093444e9-8e53-44ae-bbd3-efcb0c374905\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/7340b972542f32c74446fd2d8820f10387f1320bf84336609d21f0bd206378e7/globalmount\"" pod="openstack/glance-default-external-api-0" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.617002 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5thf5\" (UniqueName: \"kubernetes.io/projected/093444e9-8e53-44ae-bbd3-efcb0c374905-kube-api-access-5thf5\") pod \"glance-default-external-api-0\" (UID: \"093444e9-8e53-44ae-bbd3-efcb0c374905\") " pod="openstack/glance-default-external-api-0" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.660261 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-8b2507f2-3d4c-435e-95ab-efbbd47db9d7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8b2507f2-3d4c-435e-95ab-efbbd47db9d7\") pod \"glance-default-external-api-0\" (UID: \"093444e9-8e53-44ae-bbd3-efcb0c374905\") " pod="openstack/glance-default-external-api-0" Dec 02 18:58:24 crc kubenswrapper[4792]: I1202 18:58:24.724663 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 02 18:58:25 crc kubenswrapper[4792]: E1202 18:58:25.315061 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Dec 02 18:58:25 crc kubenswrapper[4792]: E1202 18:58:25.315492 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wwqvz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-t5xgj_openstack(50f2a0fa-fcf2-4f6c-be51-b78ae811fce5): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 18:58:25 crc kubenswrapper[4792]: E1202 18:58:25.316845 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-t5xgj" podUID="50f2a0fa-fcf2-4f6c-be51-b78ae811fce5" Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.456259 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-4d5bj"] Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.471970 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-n56s7" podUID="cb42b8ac-ce52-46dc-8742-339d3ea9ab03" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.130:5353: i/o timeout" Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.474383 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-4d5bj"] Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.474476 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-4d5bj" Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.501472 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/37e3266a-8a6c-4472-9bcd-459157fba9c8-ovsdbserver-sb\") pod \"dnsmasq-dns-55f844cf75-4d5bj\" (UID: \"37e3266a-8a6c-4472-9bcd-459157fba9c8\") " pod="openstack/dnsmasq-dns-55f844cf75-4d5bj" Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.501549 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/37e3266a-8a6c-4472-9bcd-459157fba9c8-dns-svc\") pod \"dnsmasq-dns-55f844cf75-4d5bj\" (UID: \"37e3266a-8a6c-4472-9bcd-459157fba9c8\") " pod="openstack/dnsmasq-dns-55f844cf75-4d5bj" Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.501639 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/37e3266a-8a6c-4472-9bcd-459157fba9c8-ovsdbserver-nb\") pod \"dnsmasq-dns-55f844cf75-4d5bj\" (UID: \"37e3266a-8a6c-4472-9bcd-459157fba9c8\") " pod="openstack/dnsmasq-dns-55f844cf75-4d5bj" Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.501702 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37e3266a-8a6c-4472-9bcd-459157fba9c8-config\") pod \"dnsmasq-dns-55f844cf75-4d5bj\" (UID: \"37e3266a-8a6c-4472-9bcd-459157fba9c8\") " pod="openstack/dnsmasq-dns-55f844cf75-4d5bj" Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.501770 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/37e3266a-8a6c-4472-9bcd-459157fba9c8-dns-swift-storage-0\") pod \"dnsmasq-dns-55f844cf75-4d5bj\" (UID: \"37e3266a-8a6c-4472-9bcd-459157fba9c8\") " pod="openstack/dnsmasq-dns-55f844cf75-4d5bj" Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.501813 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s7scw\" (UniqueName: \"kubernetes.io/projected/37e3266a-8a6c-4472-9bcd-459157fba9c8-kube-api-access-s7scw\") pod \"dnsmasq-dns-55f844cf75-4d5bj\" (UID: \"37e3266a-8a6c-4472-9bcd-459157fba9c8\") " pod="openstack/dnsmasq-dns-55f844cf75-4d5bj" Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.504228 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-dbdbd8fdb-qfn5f"] Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.519711 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-dbdbd8fdb-qfn5f" Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.521402 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.521644 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.522394 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.522547 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-tsgx2" Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.530883 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-dbdbd8fdb-qfn5f"] Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.559173 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb410397-57d4-441b-b612-0f05926cc480" path="/var/lib/kubelet/pods/cb410397-57d4-441b-b612-0f05926cc480/volumes" Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.560065 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb42b8ac-ce52-46dc-8742-339d3ea9ab03" path="/var/lib/kubelet/pods/cb42b8ac-ce52-46dc-8742-339d3ea9ab03/volumes" Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.603000 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e8170b3-9772-49c8-af59-87f59120f79e-ovndb-tls-certs\") pod \"neutron-dbdbd8fdb-qfn5f\" (UID: \"4e8170b3-9772-49c8-af59-87f59120f79e\") " pod="openstack/neutron-dbdbd8fdb-qfn5f" Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.603083 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/37e3266a-8a6c-4472-9bcd-459157fba9c8-ovsdbserver-nb\") pod \"dnsmasq-dns-55f844cf75-4d5bj\" (UID: \"37e3266a-8a6c-4472-9bcd-459157fba9c8\") " pod="openstack/dnsmasq-dns-55f844cf75-4d5bj" Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.603278 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6qx6x\" (UniqueName: \"kubernetes.io/projected/4e8170b3-9772-49c8-af59-87f59120f79e-kube-api-access-6qx6x\") pod \"neutron-dbdbd8fdb-qfn5f\" (UID: \"4e8170b3-9772-49c8-af59-87f59120f79e\") " pod="openstack/neutron-dbdbd8fdb-qfn5f" Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.603452 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37e3266a-8a6c-4472-9bcd-459157fba9c8-config\") pod \"dnsmasq-dns-55f844cf75-4d5bj\" (UID: \"37e3266a-8a6c-4472-9bcd-459157fba9c8\") " pod="openstack/dnsmasq-dns-55f844cf75-4d5bj" Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.603502 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/4e8170b3-9772-49c8-af59-87f59120f79e-httpd-config\") pod \"neutron-dbdbd8fdb-qfn5f\" (UID: \"4e8170b3-9772-49c8-af59-87f59120f79e\") " pod="openstack/neutron-dbdbd8fdb-qfn5f" Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.603634 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/4e8170b3-9772-49c8-af59-87f59120f79e-combined-ca-bundle\") pod \"neutron-dbdbd8fdb-qfn5f\" (UID: \"4e8170b3-9772-49c8-af59-87f59120f79e\") " pod="openstack/neutron-dbdbd8fdb-qfn5f" Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.603757 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/4e8170b3-9772-49c8-af59-87f59120f79e-config\") pod \"neutron-dbdbd8fdb-qfn5f\" (UID: \"4e8170b3-9772-49c8-af59-87f59120f79e\") " pod="openstack/neutron-dbdbd8fdb-qfn5f" Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.603912 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/37e3266a-8a6c-4472-9bcd-459157fba9c8-dns-swift-storage-0\") pod \"dnsmasq-dns-55f844cf75-4d5bj\" (UID: \"37e3266a-8a6c-4472-9bcd-459157fba9c8\") " pod="openstack/dnsmasq-dns-55f844cf75-4d5bj" Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.603966 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/37e3266a-8a6c-4472-9bcd-459157fba9c8-ovsdbserver-nb\") pod \"dnsmasq-dns-55f844cf75-4d5bj\" (UID: \"37e3266a-8a6c-4472-9bcd-459157fba9c8\") " pod="openstack/dnsmasq-dns-55f844cf75-4d5bj" Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.603972 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s7scw\" (UniqueName: \"kubernetes.io/projected/37e3266a-8a6c-4472-9bcd-459157fba9c8-kube-api-access-s7scw\") pod \"dnsmasq-dns-55f844cf75-4d5bj\" (UID: \"37e3266a-8a6c-4472-9bcd-459157fba9c8\") " pod="openstack/dnsmasq-dns-55f844cf75-4d5bj" Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.604024 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/37e3266a-8a6c-4472-9bcd-459157fba9c8-ovsdbserver-sb\") pod \"dnsmasq-dns-55f844cf75-4d5bj\" (UID: \"37e3266a-8a6c-4472-9bcd-459157fba9c8\") " pod="openstack/dnsmasq-dns-55f844cf75-4d5bj" Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.604066 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/37e3266a-8a6c-4472-9bcd-459157fba9c8-dns-svc\") pod \"dnsmasq-dns-55f844cf75-4d5bj\" (UID: \"37e3266a-8a6c-4472-9bcd-459157fba9c8\") " pod="openstack/dnsmasq-dns-55f844cf75-4d5bj" Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.604719 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/37e3266a-8a6c-4472-9bcd-459157fba9c8-dns-swift-storage-0\") pod \"dnsmasq-dns-55f844cf75-4d5bj\" (UID: \"37e3266a-8a6c-4472-9bcd-459157fba9c8\") " pod="openstack/dnsmasq-dns-55f844cf75-4d5bj" Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.604745 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37e3266a-8a6c-4472-9bcd-459157fba9c8-config\") pod \"dnsmasq-dns-55f844cf75-4d5bj\" (UID: \"37e3266a-8a6c-4472-9bcd-459157fba9c8\") " pod="openstack/dnsmasq-dns-55f844cf75-4d5bj" Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.604752 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/37e3266a-8a6c-4472-9bcd-459157fba9c8-ovsdbserver-sb\") pod 
\"dnsmasq-dns-55f844cf75-4d5bj\" (UID: \"37e3266a-8a6c-4472-9bcd-459157fba9c8\") " pod="openstack/dnsmasq-dns-55f844cf75-4d5bj" Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.605053 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/37e3266a-8a6c-4472-9bcd-459157fba9c8-dns-svc\") pod \"dnsmasq-dns-55f844cf75-4d5bj\" (UID: \"37e3266a-8a6c-4472-9bcd-459157fba9c8\") " pod="openstack/dnsmasq-dns-55f844cf75-4d5bj" Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.629344 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s7scw\" (UniqueName: \"kubernetes.io/projected/37e3266a-8a6c-4472-9bcd-459157fba9c8-kube-api-access-s7scw\") pod \"dnsmasq-dns-55f844cf75-4d5bj\" (UID: \"37e3266a-8a6c-4472-9bcd-459157fba9c8\") " pod="openstack/dnsmasq-dns-55f844cf75-4d5bj" Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.705542 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e8170b3-9772-49c8-af59-87f59120f79e-ovndb-tls-certs\") pod \"neutron-dbdbd8fdb-qfn5f\" (UID: \"4e8170b3-9772-49c8-af59-87f59120f79e\") " pod="openstack/neutron-dbdbd8fdb-qfn5f" Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.705633 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6qx6x\" (UniqueName: \"kubernetes.io/projected/4e8170b3-9772-49c8-af59-87f59120f79e-kube-api-access-6qx6x\") pod \"neutron-dbdbd8fdb-qfn5f\" (UID: \"4e8170b3-9772-49c8-af59-87f59120f79e\") " pod="openstack/neutron-dbdbd8fdb-qfn5f" Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.705670 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/4e8170b3-9772-49c8-af59-87f59120f79e-httpd-config\") pod \"neutron-dbdbd8fdb-qfn5f\" (UID: \"4e8170b3-9772-49c8-af59-87f59120f79e\") " pod="openstack/neutron-dbdbd8fdb-qfn5f" Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.705699 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e8170b3-9772-49c8-af59-87f59120f79e-combined-ca-bundle\") pod \"neutron-dbdbd8fdb-qfn5f\" (UID: \"4e8170b3-9772-49c8-af59-87f59120f79e\") " pod="openstack/neutron-dbdbd8fdb-qfn5f" Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.705731 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/4e8170b3-9772-49c8-af59-87f59120f79e-config\") pod \"neutron-dbdbd8fdb-qfn5f\" (UID: \"4e8170b3-9772-49c8-af59-87f59120f79e\") " pod="openstack/neutron-dbdbd8fdb-qfn5f" Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.709105 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/4e8170b3-9772-49c8-af59-87f59120f79e-httpd-config\") pod \"neutron-dbdbd8fdb-qfn5f\" (UID: \"4e8170b3-9772-49c8-af59-87f59120f79e\") " pod="openstack/neutron-dbdbd8fdb-qfn5f" Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.711877 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/4e8170b3-9772-49c8-af59-87f59120f79e-config\") pod \"neutron-dbdbd8fdb-qfn5f\" (UID: \"4e8170b3-9772-49c8-af59-87f59120f79e\") " pod="openstack/neutron-dbdbd8fdb-qfn5f" Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 
18:58:25.715624 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e8170b3-9772-49c8-af59-87f59120f79e-combined-ca-bundle\") pod \"neutron-dbdbd8fdb-qfn5f\" (UID: \"4e8170b3-9772-49c8-af59-87f59120f79e\") " pod="openstack/neutron-dbdbd8fdb-qfn5f" Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.723896 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e8170b3-9772-49c8-af59-87f59120f79e-ovndb-tls-certs\") pod \"neutron-dbdbd8fdb-qfn5f\" (UID: \"4e8170b3-9772-49c8-af59-87f59120f79e\") " pod="openstack/neutron-dbdbd8fdb-qfn5f" Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.726364 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6qx6x\" (UniqueName: \"kubernetes.io/projected/4e8170b3-9772-49c8-af59-87f59120f79e-kube-api-access-6qx6x\") pod \"neutron-dbdbd8fdb-qfn5f\" (UID: \"4e8170b3-9772-49c8-af59-87f59120f79e\") " pod="openstack/neutron-dbdbd8fdb-qfn5f" Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.805717 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-4d5bj" Dec 02 18:58:25 crc kubenswrapper[4792]: I1202 18:58:25.848688 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-dbdbd8fdb-qfn5f" Dec 02 18:58:26 crc kubenswrapper[4792]: E1202 18:58:26.342267 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-t5xgj" podUID="50f2a0fa-fcf2-4f6c-be51-b78ae811fce5" Dec 02 18:58:27 crc kubenswrapper[4792]: I1202 18:58:27.684126 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-5c7cc4d64c-s885f"] Dec 02 18:58:27 crc kubenswrapper[4792]: I1202 18:58:27.686507 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5c7cc4d64c-s885f" Dec 02 18:58:27 crc kubenswrapper[4792]: I1202 18:58:27.691493 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Dec 02 18:58:27 crc kubenswrapper[4792]: I1202 18:58:27.691734 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Dec 02 18:58:27 crc kubenswrapper[4792]: I1202 18:58:27.703134 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5c7cc4d64c-s885f"] Dec 02 18:58:27 crc kubenswrapper[4792]: I1202 18:58:27.746561 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/034594f3-2d13-4657-9426-449348df341f-public-tls-certs\") pod \"neutron-5c7cc4d64c-s885f\" (UID: \"034594f3-2d13-4657-9426-449348df341f\") " pod="openstack/neutron-5c7cc4d64c-s885f" Dec 02 18:58:27 crc kubenswrapper[4792]: I1202 18:58:27.746640 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/034594f3-2d13-4657-9426-449348df341f-ovndb-tls-certs\") pod \"neutron-5c7cc4d64c-s885f\" (UID: \"034594f3-2d13-4657-9426-449348df341f\") " pod="openstack/neutron-5c7cc4d64c-s885f" Dec 02 18:58:27 crc kubenswrapper[4792]: I1202 18:58:27.746693 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/034594f3-2d13-4657-9426-449348df341f-internal-tls-certs\") pod \"neutron-5c7cc4d64c-s885f\" (UID: \"034594f3-2d13-4657-9426-449348df341f\") " pod="openstack/neutron-5c7cc4d64c-s885f" Dec 02 18:58:27 crc kubenswrapper[4792]: I1202 18:58:27.746731 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/034594f3-2d13-4657-9426-449348df341f-config\") pod \"neutron-5c7cc4d64c-s885f\" (UID: \"034594f3-2d13-4657-9426-449348df341f\") " pod="openstack/neutron-5c7cc4d64c-s885f" Dec 02 18:58:27 crc kubenswrapper[4792]: I1202 18:58:27.746752 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/034594f3-2d13-4657-9426-449348df341f-combined-ca-bundle\") pod \"neutron-5c7cc4d64c-s885f\" (UID: \"034594f3-2d13-4657-9426-449348df341f\") " pod="openstack/neutron-5c7cc4d64c-s885f" Dec 02 18:58:27 crc kubenswrapper[4792]: I1202 18:58:27.746775 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qbmlq\" (UniqueName: \"kubernetes.io/projected/034594f3-2d13-4657-9426-449348df341f-kube-api-access-qbmlq\") pod \"neutron-5c7cc4d64c-s885f\" (UID: \"034594f3-2d13-4657-9426-449348df341f\") " pod="openstack/neutron-5c7cc4d64c-s885f" Dec 02 18:58:27 crc kubenswrapper[4792]: I1202 18:58:27.746806 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/034594f3-2d13-4657-9426-449348df341f-httpd-config\") pod \"neutron-5c7cc4d64c-s885f\" (UID: \"034594f3-2d13-4657-9426-449348df341f\") " pod="openstack/neutron-5c7cc4d64c-s885f" Dec 02 18:58:27 crc kubenswrapper[4792]: I1202 18:58:27.848302 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/034594f3-2d13-4657-9426-449348df341f-public-tls-certs\") pod \"neutron-5c7cc4d64c-s885f\" (UID: \"034594f3-2d13-4657-9426-449348df341f\") " pod="openstack/neutron-5c7cc4d64c-s885f" Dec 02 18:58:27 crc kubenswrapper[4792]: I1202 18:58:27.848382 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/034594f3-2d13-4657-9426-449348df341f-ovndb-tls-certs\") pod \"neutron-5c7cc4d64c-s885f\" (UID: \"034594f3-2d13-4657-9426-449348df341f\") " pod="openstack/neutron-5c7cc4d64c-s885f" Dec 02 18:58:27 crc kubenswrapper[4792]: I1202 18:58:27.848436 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/034594f3-2d13-4657-9426-449348df341f-internal-tls-certs\") pod \"neutron-5c7cc4d64c-s885f\" (UID: \"034594f3-2d13-4657-9426-449348df341f\") " pod="openstack/neutron-5c7cc4d64c-s885f" Dec 02 18:58:27 crc kubenswrapper[4792]: I1202 18:58:27.848472 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/034594f3-2d13-4657-9426-449348df341f-config\") pod \"neutron-5c7cc4d64c-s885f\" (UID: \"034594f3-2d13-4657-9426-449348df341f\") " pod="openstack/neutron-5c7cc4d64c-s885f" Dec 02 18:58:27 crc kubenswrapper[4792]: I1202 18:58:27.848494 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/034594f3-2d13-4657-9426-449348df341f-combined-ca-bundle\") pod \"neutron-5c7cc4d64c-s885f\" (UID: \"034594f3-2d13-4657-9426-449348df341f\") " pod="openstack/neutron-5c7cc4d64c-s885f" Dec 02 18:58:27 crc kubenswrapper[4792]: I1202 18:58:27.848532 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qbmlq\" (UniqueName: \"kubernetes.io/projected/034594f3-2d13-4657-9426-449348df341f-kube-api-access-qbmlq\") pod \"neutron-5c7cc4d64c-s885f\" (UID: \"034594f3-2d13-4657-9426-449348df341f\") " pod="openstack/neutron-5c7cc4d64c-s885f" Dec 02 18:58:27 crc kubenswrapper[4792]: I1202 18:58:27.848566 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/034594f3-2d13-4657-9426-449348df341f-httpd-config\") pod \"neutron-5c7cc4d64c-s885f\" (UID: \"034594f3-2d13-4657-9426-449348df341f\") " pod="openstack/neutron-5c7cc4d64c-s885f" Dec 02 18:58:27 crc kubenswrapper[4792]: I1202 18:58:27.854445 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/034594f3-2d13-4657-9426-449348df341f-public-tls-certs\") pod \"neutron-5c7cc4d64c-s885f\" (UID: \"034594f3-2d13-4657-9426-449348df341f\") " pod="openstack/neutron-5c7cc4d64c-s885f" Dec 02 18:58:27 crc kubenswrapper[4792]: I1202 18:58:27.854781 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/034594f3-2d13-4657-9426-449348df341f-internal-tls-certs\") pod \"neutron-5c7cc4d64c-s885f\" (UID: \"034594f3-2d13-4657-9426-449348df341f\") " pod="openstack/neutron-5c7cc4d64c-s885f" Dec 02 18:58:27 crc kubenswrapper[4792]: I1202 18:58:27.855281 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/034594f3-2d13-4657-9426-449348df341f-combined-ca-bundle\") pod \"neutron-5c7cc4d64c-s885f\" (UID: 
\"034594f3-2d13-4657-9426-449348df341f\") " pod="openstack/neutron-5c7cc4d64c-s885f" Dec 02 18:58:27 crc kubenswrapper[4792]: I1202 18:58:27.855515 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/034594f3-2d13-4657-9426-449348df341f-httpd-config\") pod \"neutron-5c7cc4d64c-s885f\" (UID: \"034594f3-2d13-4657-9426-449348df341f\") " pod="openstack/neutron-5c7cc4d64c-s885f" Dec 02 18:58:27 crc kubenswrapper[4792]: I1202 18:58:27.856362 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/034594f3-2d13-4657-9426-449348df341f-ovndb-tls-certs\") pod \"neutron-5c7cc4d64c-s885f\" (UID: \"034594f3-2d13-4657-9426-449348df341f\") " pod="openstack/neutron-5c7cc4d64c-s885f" Dec 02 18:58:27 crc kubenswrapper[4792]: I1202 18:58:27.863044 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/034594f3-2d13-4657-9426-449348df341f-config\") pod \"neutron-5c7cc4d64c-s885f\" (UID: \"034594f3-2d13-4657-9426-449348df341f\") " pod="openstack/neutron-5c7cc4d64c-s885f" Dec 02 18:58:27 crc kubenswrapper[4792]: I1202 18:58:27.870730 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qbmlq\" (UniqueName: \"kubernetes.io/projected/034594f3-2d13-4657-9426-449348df341f-kube-api-access-qbmlq\") pod \"neutron-5c7cc4d64c-s885f\" (UID: \"034594f3-2d13-4657-9426-449348df341f\") " pod="openstack/neutron-5c7cc4d64c-s885f" Dec 02 18:58:28 crc kubenswrapper[4792]: I1202 18:58:28.015366 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5c7cc4d64c-s885f" Dec 02 18:58:29 crc kubenswrapper[4792]: I1202 18:58:29.357416 4792 scope.go:117] "RemoveContainer" containerID="741bd0852e8968a46456b75531d78f874585087ae19c9d14636ebd80a12420b2" Dec 02 18:58:29 crc kubenswrapper[4792]: E1202 18:58:29.358253 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"741bd0852e8968a46456b75531d78f874585087ae19c9d14636ebd80a12420b2\": container with ID starting with 741bd0852e8968a46456b75531d78f874585087ae19c9d14636ebd80a12420b2 not found: ID does not exist" containerID="741bd0852e8968a46456b75531d78f874585087ae19c9d14636ebd80a12420b2" Dec 02 18:58:29 crc kubenswrapper[4792]: I1202 18:58:29.358277 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"741bd0852e8968a46456b75531d78f874585087ae19c9d14636ebd80a12420b2"} err="failed to get container status \"741bd0852e8968a46456b75531d78f874585087ae19c9d14636ebd80a12420b2\": rpc error: code = NotFound desc = could not find container \"741bd0852e8968a46456b75531d78f874585087ae19c9d14636ebd80a12420b2\": container with ID starting with 741bd0852e8968a46456b75531d78f874585087ae19c9d14636ebd80a12420b2 not found: ID does not exist" Dec 02 18:58:29 crc kubenswrapper[4792]: I1202 18:58:29.358300 4792 scope.go:117] "RemoveContainer" containerID="77ded948a82fdaa00ab78550791908cc7ade22cf2abb53668c89c90ce3d372e1" Dec 02 18:58:29 crc kubenswrapper[4792]: E1202 18:58:29.358732 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"77ded948a82fdaa00ab78550791908cc7ade22cf2abb53668c89c90ce3d372e1\": container with ID starting with 77ded948a82fdaa00ab78550791908cc7ade22cf2abb53668c89c90ce3d372e1 not found: ID does not exist" 
containerID="77ded948a82fdaa00ab78550791908cc7ade22cf2abb53668c89c90ce3d372e1" Dec 02 18:58:29 crc kubenswrapper[4792]: I1202 18:58:29.358760 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77ded948a82fdaa00ab78550791908cc7ade22cf2abb53668c89c90ce3d372e1"} err="failed to get container status \"77ded948a82fdaa00ab78550791908cc7ade22cf2abb53668c89c90ce3d372e1\": rpc error: code = NotFound desc = could not find container \"77ded948a82fdaa00ab78550791908cc7ade22cf2abb53668c89c90ce3d372e1\": container with ID starting with 77ded948a82fdaa00ab78550791908cc7ade22cf2abb53668c89c90ce3d372e1 not found: ID does not exist" Dec 02 18:58:29 crc kubenswrapper[4792]: I1202 18:58:29.358772 4792 scope.go:117] "RemoveContainer" containerID="741bd0852e8968a46456b75531d78f874585087ae19c9d14636ebd80a12420b2" Dec 02 18:58:29 crc kubenswrapper[4792]: I1202 18:58:29.359132 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"741bd0852e8968a46456b75531d78f874585087ae19c9d14636ebd80a12420b2"} err="failed to get container status \"741bd0852e8968a46456b75531d78f874585087ae19c9d14636ebd80a12420b2\": rpc error: code = NotFound desc = could not find container \"741bd0852e8968a46456b75531d78f874585087ae19c9d14636ebd80a12420b2\": container with ID starting with 741bd0852e8968a46456b75531d78f874585087ae19c9d14636ebd80a12420b2 not found: ID does not exist" Dec 02 18:58:29 crc kubenswrapper[4792]: I1202 18:58:29.359178 4792 scope.go:117] "RemoveContainer" containerID="77ded948a82fdaa00ab78550791908cc7ade22cf2abb53668c89c90ce3d372e1" Dec 02 18:58:29 crc kubenswrapper[4792]: I1202 18:58:29.359464 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77ded948a82fdaa00ab78550791908cc7ade22cf2abb53668c89c90ce3d372e1"} err="failed to get container status \"77ded948a82fdaa00ab78550791908cc7ade22cf2abb53668c89c90ce3d372e1\": rpc error: code = NotFound desc = could not find container \"77ded948a82fdaa00ab78550791908cc7ade22cf2abb53668c89c90ce3d372e1\": container with ID starting with 77ded948a82fdaa00ab78550791908cc7ade22cf2abb53668c89c90ce3d372e1 not found: ID does not exist" Dec 02 18:58:29 crc kubenswrapper[4792]: I1202 18:58:29.359490 4792 scope.go:117] "RemoveContainer" containerID="4cc5f89a2ac81f99ed816c69d3a3c0dda7054cfbb9e59772e9da02e25c570404" Dec 02 18:58:29 crc kubenswrapper[4792]: I1202 18:58:29.902183 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 18:58:29 crc kubenswrapper[4792]: I1202 18:58:29.948065 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-455zq"] Dec 02 18:58:32 crc kubenswrapper[4792]: W1202 18:58:32.005609 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod244d824b_faf3_4749_adeb_b3c5c13004b4.slice/crio-e5716b5f51fd8eea71c5d651c2cb8fb6e447d954d8ea1bb1e48a68571c3ed461 WatchSource:0}: Error finding container e5716b5f51fd8eea71c5d651c2cb8fb6e447d954d8ea1bb1e48a68571c3ed461: Status 404 returned error can't find the container with id e5716b5f51fd8eea71c5d651c2cb8fb6e447d954d8ea1bb1e48a68571c3ed461 Dec 02 18:58:32 crc kubenswrapper[4792]: I1202 18:58:32.436324 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"244d824b-faf3-4749-adeb-b3c5c13004b4","Type":"ContainerStarted","Data":"e5716b5f51fd8eea71c5d651c2cb8fb6e447d954d8ea1bb1e48a68571c3ed461"}
Dec 02 18:58:32 crc kubenswrapper[4792]: I1202 18:58:32.671991 4792 scope.go:117] "RemoveContainer" containerID="826894c8fb1fb7d39a1b114b103fbc063f12c619f40e9bba842aac2d41a33f57"
Dec 02 18:58:32 crc kubenswrapper[4792]: E1202 18:58:32.700739 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current"
Dec 02 18:58:32 crc kubenswrapper[4792]: E1202 18:58:32.700810 4792 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current"
Dec 02 18:58:32 crc kubenswrapper[4792]: E1202 18:58:32.700989 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cloudkitty-db-sync,Image:quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CloudKittyPassword,Value:,ValueFrom:&EnvVarSource{FieldRef:nil,ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:&SecretKeySelector{LocalObjectReference:LocalObjectReference{Name:osp-secret,},Key:CloudKittyPassword,Optional:nil,},},},EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:cloudkitty-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:certs,ReadOnly:true,MountPath:/var/lib/openstack/loki-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-km57l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42406,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cloudkitty-db-sync-wh2hr_openstack(3df2077b-8a01-47ae-ad22-abfc02071c24): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 02 18:58:32 crc kubenswrapper[4792]: E1202 18:58:32.702645 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cloudkitty-db-sync-wh2hr" podUID="3df2077b-8a01-47ae-ad22-abfc02071c24"
Dec 02 18:58:32 crc kubenswrapper[4792]: I1202 18:58:32.895301 4792 scope.go:117] "RemoveContainer" containerID="560181dc27cc02423fda8f70bd5fe4924e2a1394e26186415302fa3a357de7fd"
Dec 02 18:58:32 crc kubenswrapper[4792]: I1202 18:58:32.982986 4792 scope.go:117] "RemoveContainer" containerID="b3b16047b3c5ed24afe14403544dd24de9bf5daec6fc0cc8834024fc25e432a5"
Dec 02 18:58:33 crc kubenswrapper[4792]: I1202 18:58:33.073206 4792 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 02 18:58:33 crc kubenswrapper[4792]: I1202 18:58:33.270360 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 02 18:58:33 crc kubenswrapper[4792]: W1202 18:58:33.279833 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod093444e9_8e53_44ae_bbd3_efcb0c374905.slice/crio-60cffb2de202197fba9908b0e982444621d9e79f6feb06983d62fd520c27f97c WatchSource:0}: Error finding container 60cffb2de202197fba9908b0e982444621d9e79f6feb06983d62fd520c27f97c: Status 404 returned error can't find the container with id 60cffb2de202197fba9908b0e982444621d9e79f6feb06983d62fd520c27f97c
Dec 02 18:58:33 crc kubenswrapper[4792]: I1202 18:58:33.359735 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-4d5bj"]
Dec 02 18:58:33 crc kubenswrapper[4792]: I1202 18:58:33.435090 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-dbdbd8fdb-qfn5f"]
Dec 02 18:58:33 crc kubenswrapper[4792]: I1202 18:58:33.448829 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bdde8f20-2325-4180-85b9-72a2b2fefe9f","Type":"ContainerStarted","Data":"fccd05acdfae0190d57adb67e59ec0a292fdbda6b3beca1954a9d7ea96a1bdf9"}
Dec 02 18:58:33 crc kubenswrapper[4792]: I1202 18:58:33.454236 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-455zq" event={"ID":"df41e02b-ae80-4435-bcd8-df8b9549e73d","Type":"ContainerStarted","Data":"97a281cfc08d61b59207510a6ebdd6a9f22255a670c1c310d234606d7e3a07cf"}
Dec 02 18:58:33 crc kubenswrapper[4792]: I1202 18:58:33.454271 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-455zq" event={"ID":"df41e02b-ae80-4435-bcd8-df8b9549e73d","Type":"ContainerStarted","Data":"0b8abf6515560846144b14d656055e6ed87f2ea8938e98fe06a5f0c5b6354254"}
Dec 02 18:58:33 crc kubenswrapper[4792]: W1202 18:58:33.455014 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4e8170b3_9772_49c8_af59_87f59120f79e.slice/crio-61422a3266f50940e7054734aa641517271a0c79ea252bc5535ff1204b79aa6f WatchSource:0}: Error finding container 61422a3266f50940e7054734aa641517271a0c79ea252bc5535ff1204b79aa6f: Status 404 returned error can't find the container with id 61422a3266f50940e7054734aa641517271a0c79ea252bc5535ff1204b79aa6f
Dec 02 18:58:33 crc kubenswrapper[4792]: I1202 18:58:33.457928 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"093444e9-8e53-44ae-bbd3-efcb0c374905","Type":"ContainerStarted","Data":"60cffb2de202197fba9908b0e982444621d9e79f6feb06983d62fd520c27f97c"}
Dec 02 18:58:33 crc kubenswrapper[4792]: I1202 18:58:33.464709 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-4d5bj" event={"ID":"37e3266a-8a6c-4472-9bcd-459157fba9c8","Type":"ContainerStarted","Data":"09ab05cc8ca897b1a1d13e3db1f5407a89ca3a167c53f57686f6eec682214ea0"}
Dec 02 18:58:33 crc kubenswrapper[4792]: I1202 18:58:33.474886 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-455zq" podStartSLOduration=28.474867927 podStartE2EDuration="28.474867927s" podCreationTimestamp="2025-12-02 18:58:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:58:33.469413085 +0000 UTC m=+1344.242305413" watchObservedRunningTime="2025-12-02 18:58:33.474867927 +0000 UTC m=+1344.247760255"
Dec 02 18:58:33 crc kubenswrapper[4792]: I1202 18:58:33.479357 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-587gx" event={"ID":"617ad28d-5e59-4407-91e7-740824d3ce43","Type":"ContainerStarted","Data":"44560b614e0659c0a75630ffb0ac7c21ecd415bd18f4831b5ac1023d88282289"}
Dec 02 18:58:33 crc kubenswrapper[4792]: E1202 18:58:33.482122 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current\\\"\"" pod="openstack/cloudkitty-db-sync-wh2hr" podUID="3df2077b-8a01-47ae-ad22-abfc02071c24"
Dec 02 18:58:33 crc kubenswrapper[4792]: I1202 18:58:33.542949 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-587gx" podStartSLOduration=11.34239312 podStartE2EDuration="40.54292674s" podCreationTimestamp="2025-12-02 18:57:53 +0000 UTC" firstStartedPulling="2025-12-02 18:57:54.870586288 +0000 UTC m=+1305.643478616" lastFinishedPulling="2025-12-02 18:58:24.071119868 +0000 UTC m=+1334.844012236" observedRunningTime="2025-12-02 18:58:33.515076708 +0000 UTC m=+1344.287969026" watchObservedRunningTime="2025-12-02 18:58:33.54292674 +0000 UTC m=+1344.315819068"
Dec 02 18:58:33 crc kubenswrapper[4792]: I1202 18:58:33.854667 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5c7cc4d64c-s885f"]
Dec 02 18:58:33 crc kubenswrapper[4792]: W1202 18:58:33.865807 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod034594f3_2d13_4657_9426_449348df341f.slice/crio-abd6eabd87a9033f51d4b8181fbf36c0ab39997de6948877e10b3f37be3aace6 WatchSource:0}: Error finding container abd6eabd87a9033f51d4b8181fbf36c0ab39997de6948877e10b3f37be3aace6: Status 404 returned error can't find the container with id abd6eabd87a9033f51d4b8181fbf36c0ab39997de6948877e10b3f37be3aace6
Dec 02 18:58:34 crc kubenswrapper[4792]: I1202 18:58:34.506699 4792 generic.go:334] "Generic (PLEG): container finished" podID="37e3266a-8a6c-4472-9bcd-459157fba9c8" containerID="85a445d1592a140b8836b86170fd79ad5c3b274ea82493aae16c14f84c6531bc" exitCode=0
Dec 02 18:58:34 crc kubenswrapper[4792]: I1202 18:58:34.507181 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-4d5bj" event={"ID":"37e3266a-8a6c-4472-9bcd-459157fba9c8","Type":"ContainerDied","Data":"85a445d1592a140b8836b86170fd79ad5c3b274ea82493aae16c14f84c6531bc"}
Dec 02 18:58:34 crc kubenswrapper[4792]: I1202 18:58:34.514906 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"093444e9-8e53-44ae-bbd3-efcb0c374905","Type":"ContainerStarted","Data":"86bcb694850473f45b157cab869bfab13bb5d124c31275ba77dd4e7dd0f23b8d"}
Dec 02 18:58:34 crc kubenswrapper[4792]: I1202 18:58:34.521147 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dbdbd8fdb-qfn5f" event={"ID":"4e8170b3-9772-49c8-af59-87f59120f79e","Type":"ContainerStarted","Data":"2a1bf3cca136c798737aa7c278cec751d646cc89e57e234d02cfd751a9034823"}
Dec 02 18:58:34 crc kubenswrapper[4792]: I1202 18:58:34.521195 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dbdbd8fdb-qfn5f" event={"ID":"4e8170b3-9772-49c8-af59-87f59120f79e","Type":"ContainerStarted","Data":"4300cdeec8594eb1591851d81d17471828b13074e67a7bdcb89053867bc4d3b4"}
Dec 02 18:58:34 crc kubenswrapper[4792]: I1202 18:58:34.521206 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dbdbd8fdb-qfn5f" event={"ID":"4e8170b3-9772-49c8-af59-87f59120f79e","Type":"ContainerStarted","Data":"61422a3266f50940e7054734aa641517271a0c79ea252bc5535ff1204b79aa6f"}
Dec 02 18:58:34 crc kubenswrapper[4792]: I1202 18:58:34.522147 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-dbdbd8fdb-qfn5f"
Dec 02 18:58:34 crc kubenswrapper[4792]: I1202 18:58:34.532765 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"244d824b-faf3-4749-adeb-b3c5c13004b4","Type":"ContainerStarted","Data":"a554100aefd6e5dd5c34558cc0db5e362dd200bc121265727fce77288da98872"}
Dec 02 18:58:34 crc kubenswrapper[4792]: I1202 18:58:34.532806 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"244d824b-faf3-4749-adeb-b3c5c13004b4","Type":"ContainerStarted","Data":"145bf06aca750b3bdf51332770863e24fd73368dfbe685596acbc8bf9444dc4b"}
Dec 02 18:58:34 crc kubenswrapper[4792]: I1202 18:58:34.543528 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5c7cc4d64c-s885f" event={"ID":"034594f3-2d13-4657-9426-449348df341f","Type":"ContainerStarted","Data":"5ec13d4a61b0ee85d2efc27f088ec884dde1927f2a438802d8aab7c47e6611fb"}
Dec 02 18:58:34 crc kubenswrapper[4792]: I1202 18:58:34.543595 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5c7cc4d64c-s885f" event={"ID":"034594f3-2d13-4657-9426-449348df341f","Type":"ContainerStarted","Data":"abd6eabd87a9033f51d4b8181fbf36c0ab39997de6948877e10b3f37be3aace6"}
Dec 02 18:58:34 crc kubenswrapper[4792]: I1202 18:58:34.562725 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-dbdbd8fdb-qfn5f" podStartSLOduration=9.562706124 podStartE2EDuration="9.562706124s" podCreationTimestamp="2025-12-02 18:58:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:58:34.554586153 +0000 UTC m=+1345.327478481" watchObservedRunningTime="2025-12-02 18:58:34.562706124 +0000 UTC m=+1345.335598452"
Dec 02 18:58:35 crc kubenswrapper[4792]: I1202 18:58:35.598332 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=37.598310768 podStartE2EDuration="37.598310768s" podCreationTimestamp="2025-12-02 18:57:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:58:34.583190165 +0000 UTC m=+1345.356082493" watchObservedRunningTime="2025-12-02 18:58:35.598310768 +0000 UTC m=+1346.371203106"
Dec 02 18:58:35 crc kubenswrapper[4792]: I1202 18:58:35.601689 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"093444e9-8e53-44ae-bbd3-efcb0c374905","Type":"ContainerStarted","Data":"d10a40ec1c6dd79d132d13e2508e96c3c0c660d6f43d15aa0c96c97cc90c45c3"}
Dec 02 18:58:35 crc kubenswrapper[4792]: I1202 18:58:35.605316 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5c7cc4d64c-s885f" event={"ID":"034594f3-2d13-4657-9426-449348df341f","Type":"ContainerStarted","Data":"7b14642438ce55fc2a11fefe44b71499e2c342e2f37a463eed960e6b582c72f9"}
Dec 02 18:58:35 crc kubenswrapper[4792]: I1202 18:58:35.605948 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-5c7cc4d64c-s885f"
Dec 02 18:58:35 crc kubenswrapper[4792]: I1202 18:58:35.610586 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bdde8f20-2325-4180-85b9-72a2b2fefe9f","Type":"ContainerStarted","Data":"23c4030bca0c0424a8ff69891fb0f6de0457c694bc6a0c79ccf55185d7c4d351"}
Dec 02 18:58:35 crc kubenswrapper[4792]: I1202 18:58:35.628892 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=11.628873431 podStartE2EDuration="11.628873431s" podCreationTimestamp="2025-12-02 18:58:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:58:35.622643319 +0000 UTC m=+1346.395535647" watchObservedRunningTime="2025-12-02 18:58:35.628873431 +0000 UTC m=+1346.401765759"
Dec 02 18:58:35 crc kubenswrapper[4792]: I1202 18:58:35.657192 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-5c7cc4d64c-s885f" podStartSLOduration=8.657170604000001 podStartE2EDuration="8.657170604s" podCreationTimestamp="2025-12-02 18:58:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:58:35.649170846 +0000 UTC m=+1346.422063174" watchObservedRunningTime="2025-12-02 18:58:35.657170604 +0000 UTC m=+1346.430062952"
Dec 02 18:58:36 crc kubenswrapper[4792]: I1202 18:58:36.648707 4792 generic.go:334] "Generic (PLEG): container finished" podID="df41e02b-ae80-4435-bcd8-df8b9549e73d" containerID="97a281cfc08d61b59207510a6ebdd6a9f22255a670c1c310d234606d7e3a07cf" exitCode=0
Dec 02 18:58:36 crc kubenswrapper[4792]: I1202 18:58:36.649048 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-455zq" event={"ID":"df41e02b-ae80-4435-bcd8-df8b9549e73d","Type":"ContainerDied","Data":"97a281cfc08d61b59207510a6ebdd6a9f22255a670c1c310d234606d7e3a07cf"}
Dec 02 18:58:36 crc kubenswrapper[4792]: I1202 18:58:36.655205 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-4d5bj" event={"ID":"37e3266a-8a6c-4472-9bcd-459157fba9c8","Type":"ContainerStarted","Data":"1340723c8cb786b6643bcc1d4da66b7941aa1f104c91ba38db06f74153d8b11f"}
Dec 02 18:58:36 crc kubenswrapper[4792]: I1202 18:58:36.655248 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-55f844cf75-4d5bj"
Dec 02 18:58:36 crc kubenswrapper[4792]: I1202 18:58:36.665833 4792 generic.go:334] "Generic (PLEG): container finished" podID="617ad28d-5e59-4407-91e7-740824d3ce43" containerID="44560b614e0659c0a75630ffb0ac7c21ecd415bd18f4831b5ac1023d88282289" exitCode=0
Dec 02 18:58:36 crc kubenswrapper[4792]: I1202 18:58:36.665896 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-587gx" event={"ID":"617ad28d-5e59-4407-91e7-740824d3ce43","Type":"ContainerDied","Data":"44560b614e0659c0a75630ffb0ac7c21ecd415bd18f4831b5ac1023d88282289"}
Dec 02 18:58:36 crc kubenswrapper[4792]: I1202 18:58:36.676474 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-dc5j7" event={"ID":"477f2280-d198-47f0-8f7a-cd76106d9f35","Type":"ContainerStarted","Data":"a221f5350151ff73453af64669d33484cd1eed2a9842abb2fa574e9db8014099"}
Dec 02 18:58:36 crc kubenswrapper[4792]: I1202 18:58:36.724356 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-55f844cf75-4d5bj" podStartSLOduration=11.724321446 podStartE2EDuration="11.724321446s" podCreationTimestamp="2025-12-02 18:58:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:58:36.721929424 +0000 UTC m=+1347.494821762" watchObservedRunningTime="2025-12-02 18:58:36.724321446 +0000 UTC m=+1347.497213764"
Dec 02 18:58:36 crc kubenswrapper[4792]: I1202 18:58:36.763770 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-dc5j7" podStartSLOduration=2.364400344 podStartE2EDuration="43.763730777s" podCreationTimestamp="2025-12-02 18:57:53 +0000 UTC" firstStartedPulling="2025-12-02 18:57:54.679480346 +0000 UTC m=+1305.452372674" lastFinishedPulling="2025-12-02 18:58:36.078810769 +0000 UTC m=+1346.851703107" observedRunningTime="2025-12-02 18:58:36.75573387 +0000 UTC m=+1347.528626198" watchObservedRunningTime="2025-12-02 18:58:36.763730777 +0000 UTC m=+1347.536623105"
Dec 02 18:58:38 crc kubenswrapper[4792]: I1202 18:58:38.082711 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 02 18:58:38 crc kubenswrapper[4792]: I1202 18:58:38.082912 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 02 18:58:38 crc kubenswrapper[4792]: I1202 18:58:38.706890 4792 generic.go:334] "Generic (PLEG): container finished" podID="477f2280-d198-47f0-8f7a-cd76106d9f35" containerID="a221f5350151ff73453af64669d33484cd1eed2a9842abb2fa574e9db8014099" exitCode=0
Dec 02 18:58:38 crc kubenswrapper[4792]: I1202 18:58:38.706938 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-dc5j7" event={"ID":"477f2280-d198-47f0-8f7a-cd76106d9f35","Type":"ContainerDied","Data":"a221f5350151ff73453af64669d33484cd1eed2a9842abb2fa574e9db8014099"}
Dec 02 18:58:39 crc kubenswrapper[4792]: I1202 18:58:39.369417 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Dec 02 18:58:39 crc kubenswrapper[4792]: I1202 18:58:39.369793 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Dec 02 18:58:39 crc kubenswrapper[4792]: I1202 18:58:39.400688 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Dec 02 18:58:39 crc kubenswrapper[4792]: I1202 18:58:39.420939 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Dec 02 18:58:39 crc kubenswrapper[4792]: I1202 18:58:39.716771 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Dec 02 18:58:39 crc kubenswrapper[4792]: I1202 18:58:39.716802 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Dec 02 18:58:40 crc kubenswrapper[4792]: I1202 18:58:40.810354 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-55f844cf75-4d5bj"
Dec 02 18:58:40 crc kubenswrapper[4792]: I1202 18:58:40.865937 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-pc89n"]
Dec 02 18:58:40 crc kubenswrapper[4792]: I1202 18:58:40.866166 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-785d8bcb8c-pc89n" podUID="cc372579-8ed3-41c8-9a89-6e3c026e3d6a" containerName="dnsmasq-dns" containerID="cri-o://135740db455caf8a7a1724cf126d07518a91f072de7c1c85bc49c3753fc8f292" gracePeriod=10
Dec 02 18:58:41 crc kubenswrapper[4792]: I1202 18:58:41.740580 4792 generic.go:334] "Generic (PLEG): container finished" podID="cc372579-8ed3-41c8-9a89-6e3c026e3d6a" containerID="135740db455caf8a7a1724cf126d07518a91f072de7c1c85bc49c3753fc8f292" exitCode=0
Dec 02 18:58:41 crc kubenswrapper[4792]: I1202 18:58:41.740694 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-pc89n" event={"ID":"cc372579-8ed3-41c8-9a89-6e3c026e3d6a","Type":"ContainerDied","Data":"135740db455caf8a7a1724cf126d07518a91f072de7c1c85bc49c3753fc8f292"}
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.374256 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-dc5j7"
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.402395 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-455zq"
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.410697 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-587gx"
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.511230 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dlhvk\" (UniqueName: \"kubernetes.io/projected/477f2280-d198-47f0-8f7a-cd76106d9f35-kube-api-access-dlhvk\") pod \"477f2280-d198-47f0-8f7a-cd76106d9f35\" (UID: \"477f2280-d198-47f0-8f7a-cd76106d9f35\") "
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.511326 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8cwvs\" (UniqueName: \"kubernetes.io/projected/df41e02b-ae80-4435-bcd8-df8b9549e73d-kube-api-access-8cwvs\") pod \"df41e02b-ae80-4435-bcd8-df8b9549e73d\" (UID: \"df41e02b-ae80-4435-bcd8-df8b9549e73d\") "
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.511378 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/477f2280-d198-47f0-8f7a-cd76106d9f35-db-sync-config-data\") pod \"477f2280-d198-47f0-8f7a-cd76106d9f35\" (UID: \"477f2280-d198-47f0-8f7a-cd76106d9f35\") "
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.511424 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/617ad28d-5e59-4407-91e7-740824d3ce43-scripts\") pod \"617ad28d-5e59-4407-91e7-740824d3ce43\" (UID: \"617ad28d-5e59-4407-91e7-740824d3ce43\") "
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.511446 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/df41e02b-ae80-4435-bcd8-df8b9549e73d-fernet-keys\") pod \"df41e02b-ae80-4435-bcd8-df8b9549e73d\" (UID: \"df41e02b-ae80-4435-bcd8-df8b9549e73d\") "
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.511477 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/617ad28d-5e59-4407-91e7-740824d3ce43-combined-ca-bundle\") pod \"617ad28d-5e59-4407-91e7-740824d3ce43\" (UID: \"617ad28d-5e59-4407-91e7-740824d3ce43\") "
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.511499 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df41e02b-ae80-4435-bcd8-df8b9549e73d-scripts\") pod \"df41e02b-ae80-4435-bcd8-df8b9549e73d\" (UID: \"df41e02b-ae80-4435-bcd8-df8b9549e73d\") "
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.511579 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/617ad28d-5e59-4407-91e7-740824d3ce43-config-data\") pod \"617ad28d-5e59-4407-91e7-740824d3ce43\" (UID: \"617ad28d-5e59-4407-91e7-740824d3ce43\") "
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.511606 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df41e02b-ae80-4435-bcd8-df8b9549e73d-config-data\") pod \"df41e02b-ae80-4435-bcd8-df8b9549e73d\" (UID: \"df41e02b-ae80-4435-bcd8-df8b9549e73d\") "
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.511627 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/617ad28d-5e59-4407-91e7-740824d3ce43-logs\") pod \"617ad28d-5e59-4407-91e7-740824d3ce43\" (UID: \"617ad28d-5e59-4407-91e7-740824d3ce43\") "
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.511715 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/df41e02b-ae80-4435-bcd8-df8b9549e73d-credential-keys\") pod \"df41e02b-ae80-4435-bcd8-df8b9549e73d\" (UID: \"df41e02b-ae80-4435-bcd8-df8b9549e73d\") "
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.511741 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/477f2280-d198-47f0-8f7a-cd76106d9f35-combined-ca-bundle\") pod \"477f2280-d198-47f0-8f7a-cd76106d9f35\" (UID: \"477f2280-d198-47f0-8f7a-cd76106d9f35\") "
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.511759 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qrrqv\" (UniqueName: \"kubernetes.io/projected/617ad28d-5e59-4407-91e7-740824d3ce43-kube-api-access-qrrqv\") pod \"617ad28d-5e59-4407-91e7-740824d3ce43\" (UID: \"617ad28d-5e59-4407-91e7-740824d3ce43\") "
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.511788 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df41e02b-ae80-4435-bcd8-df8b9549e73d-combined-ca-bundle\") pod \"df41e02b-ae80-4435-bcd8-df8b9549e73d\" (UID: \"df41e02b-ae80-4435-bcd8-df8b9549e73d\") "
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.512911 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/617ad28d-5e59-4407-91e7-740824d3ce43-logs" (OuterVolumeSpecName: "logs") pod "617ad28d-5e59-4407-91e7-740824d3ce43" (UID: "617ad28d-5e59-4407-91e7-740824d3ce43"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.516112 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df41e02b-ae80-4435-bcd8-df8b9549e73d-scripts" (OuterVolumeSpecName: "scripts") pod "df41e02b-ae80-4435-bcd8-df8b9549e73d" (UID: "df41e02b-ae80-4435-bcd8-df8b9549e73d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.520650 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/477f2280-d198-47f0-8f7a-cd76106d9f35-kube-api-access-dlhvk" (OuterVolumeSpecName: "kube-api-access-dlhvk") pod "477f2280-d198-47f0-8f7a-cd76106d9f35" (UID: "477f2280-d198-47f0-8f7a-cd76106d9f35"). InnerVolumeSpecName "kube-api-access-dlhvk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.520742 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/477f2280-d198-47f0-8f7a-cd76106d9f35-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "477f2280-d198-47f0-8f7a-cd76106d9f35" (UID: "477f2280-d198-47f0-8f7a-cd76106d9f35"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.524249 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df41e02b-ae80-4435-bcd8-df8b9549e73d-kube-api-access-8cwvs" (OuterVolumeSpecName: "kube-api-access-8cwvs") pod "df41e02b-ae80-4435-bcd8-df8b9549e73d" (UID: "df41e02b-ae80-4435-bcd8-df8b9549e73d"). InnerVolumeSpecName "kube-api-access-8cwvs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.524327 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/617ad28d-5e59-4407-91e7-740824d3ce43-scripts" (OuterVolumeSpecName: "scripts") pod "617ad28d-5e59-4407-91e7-740824d3ce43" (UID: "617ad28d-5e59-4407-91e7-740824d3ce43"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.524386 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df41e02b-ae80-4435-bcd8-df8b9549e73d-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "df41e02b-ae80-4435-bcd8-df8b9549e73d" (UID: "df41e02b-ae80-4435-bcd8-df8b9549e73d"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.528616 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/617ad28d-5e59-4407-91e7-740824d3ce43-kube-api-access-qrrqv" (OuterVolumeSpecName: "kube-api-access-qrrqv") pod "617ad28d-5e59-4407-91e7-740824d3ce43" (UID: "617ad28d-5e59-4407-91e7-740824d3ce43"). InnerVolumeSpecName "kube-api-access-qrrqv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.546127 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df41e02b-ae80-4435-bcd8-df8b9549e73d-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "df41e02b-ae80-4435-bcd8-df8b9549e73d" (UID: "df41e02b-ae80-4435-bcd8-df8b9549e73d"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.546280 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/617ad28d-5e59-4407-91e7-740824d3ce43-config-data" (OuterVolumeSpecName: "config-data") pod "617ad28d-5e59-4407-91e7-740824d3ce43" (UID: "617ad28d-5e59-4407-91e7-740824d3ce43"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.549219 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-pc89n"
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.568411 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df41e02b-ae80-4435-bcd8-df8b9549e73d-config-data" (OuterVolumeSpecName: "config-data") pod "df41e02b-ae80-4435-bcd8-df8b9549e73d" (UID: "df41e02b-ae80-4435-bcd8-df8b9549e73d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.574054 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/617ad28d-5e59-4407-91e7-740824d3ce43-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "617ad28d-5e59-4407-91e7-740824d3ce43" (UID: "617ad28d-5e59-4407-91e7-740824d3ce43"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.577391 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/477f2280-d198-47f0-8f7a-cd76106d9f35-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "477f2280-d198-47f0-8f7a-cd76106d9f35" (UID: "477f2280-d198-47f0-8f7a-cd76106d9f35"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.588679 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df41e02b-ae80-4435-bcd8-df8b9549e73d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "df41e02b-ae80-4435-bcd8-df8b9549e73d" (UID: "df41e02b-ae80-4435-bcd8-df8b9549e73d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.615170 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dlhvk\" (UniqueName: \"kubernetes.io/projected/477f2280-d198-47f0-8f7a-cd76106d9f35-kube-api-access-dlhvk\") on node \"crc\" DevicePath \"\""
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.615198 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8cwvs\" (UniqueName: \"kubernetes.io/projected/df41e02b-ae80-4435-bcd8-df8b9549e73d-kube-api-access-8cwvs\") on node \"crc\" DevicePath \"\""
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.615207 4792 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/477f2280-d198-47f0-8f7a-cd76106d9f35-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.615230 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/617ad28d-5e59-4407-91e7-740824d3ce43-scripts\") on node \"crc\" DevicePath \"\""
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.615240 4792 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/df41e02b-ae80-4435-bcd8-df8b9549e73d-fernet-keys\") on node \"crc\" DevicePath \"\""
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.615249 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/617ad28d-5e59-4407-91e7-740824d3ce43-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.615256 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df41e02b-ae80-4435-bcd8-df8b9549e73d-scripts\") on node \"crc\" DevicePath \"\""
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.615265 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/617ad28d-5e59-4407-91e7-740824d3ce43-config-data\") on node \"crc\" DevicePath \"\""
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.615273 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df41e02b-ae80-4435-bcd8-df8b9549e73d-config-data\") on node \"crc\" DevicePath \"\""
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.615280 4792 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/617ad28d-5e59-4407-91e7-740824d3ce43-logs\") on node \"crc\" DevicePath \"\""
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.615287 4792 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/df41e02b-ae80-4435-bcd8-df8b9549e73d-credential-keys\") on node \"crc\" DevicePath \"\""
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.615294 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/477f2280-d198-47f0-8f7a-cd76106d9f35-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.615302 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qrrqv\" (UniqueName: \"kubernetes.io/projected/617ad28d-5e59-4407-91e7-740824d3ce43-kube-api-access-qrrqv\") on node \"crc\" DevicePath \"\""
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.615311 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df41e02b-ae80-4435-bcd8-df8b9549e73d-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.716288 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc372579-8ed3-41c8-9a89-6e3c026e3d6a-config\") pod \"cc372579-8ed3-41c8-9a89-6e3c026e3d6a\" (UID: \"cc372579-8ed3-41c8-9a89-6e3c026e3d6a\") "
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.716405 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cc372579-8ed3-41c8-9a89-6e3c026e3d6a-ovsdbserver-nb\") pod \"cc372579-8ed3-41c8-9a89-6e3c026e3d6a\" (UID: \"cc372579-8ed3-41c8-9a89-6e3c026e3d6a\") "
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.716463 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cc372579-8ed3-41c8-9a89-6e3c026e3d6a-dns-svc\") pod \"cc372579-8ed3-41c8-9a89-6e3c026e3d6a\" (UID: \"cc372579-8ed3-41c8-9a89-6e3c026e3d6a\") "
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.716481 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cc372579-8ed3-41c8-9a89-6e3c026e3d6a-dns-swift-storage-0\") pod \"cc372579-8ed3-41c8-9a89-6e3c026e3d6a\" (UID: \"cc372579-8ed3-41c8-9a89-6e3c026e3d6a\") "
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.716518 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pxh2k\" (UniqueName: \"kubernetes.io/projected/cc372579-8ed3-41c8-9a89-6e3c026e3d6a-kube-api-access-pxh2k\") pod \"cc372579-8ed3-41c8-9a89-6e3c026e3d6a\" (UID: \"cc372579-8ed3-41c8-9a89-6e3c026e3d6a\") "
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.716614 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cc372579-8ed3-41c8-9a89-6e3c026e3d6a-ovsdbserver-sb\") pod \"cc372579-8ed3-41c8-9a89-6e3c026e3d6a\" (UID: \"cc372579-8ed3-41c8-9a89-6e3c026e3d6a\") "
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.737175 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc372579-8ed3-41c8-9a89-6e3c026e3d6a-kube-api-access-pxh2k" (OuterVolumeSpecName: "kube-api-access-pxh2k") pod "cc372579-8ed3-41c8-9a89-6e3c026e3d6a" (UID: "cc372579-8ed3-41c8-9a89-6e3c026e3d6a"). InnerVolumeSpecName "kube-api-access-pxh2k". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.774345 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-587gx" event={"ID":"617ad28d-5e59-4407-91e7-740824d3ce43","Type":"ContainerDied","Data":"e8cf3ddf89718d86c828fa432a12e73fbf817a24fb7fd92fb84ab47cee966a19"}
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.774387 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e8cf3ddf89718d86c828fa432a12e73fbf817a24fb7fd92fb84ab47cee966a19"
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.774461 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-587gx"
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.788798 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-dc5j7" event={"ID":"477f2280-d198-47f0-8f7a-cd76106d9f35","Type":"ContainerDied","Data":"22619db8e9ad3a2969455359d4404e955863d6429ad6b597ddaf2de2359fb44d"}
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.788836 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="22619db8e9ad3a2969455359d4404e955863d6429ad6b597ddaf2de2359fb44d"
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.788924 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-dc5j7"
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.799747 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc372579-8ed3-41c8-9a89-6e3c026e3d6a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "cc372579-8ed3-41c8-9a89-6e3c026e3d6a" (UID: "cc372579-8ed3-41c8-9a89-6e3c026e3d6a"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.809510 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc372579-8ed3-41c8-9a89-6e3c026e3d6a-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "cc372579-8ed3-41c8-9a89-6e3c026e3d6a" (UID: "cc372579-8ed3-41c8-9a89-6e3c026e3d6a"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.812002 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc372579-8ed3-41c8-9a89-6e3c026e3d6a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "cc372579-8ed3-41c8-9a89-6e3c026e3d6a" (UID: "cc372579-8ed3-41c8-9a89-6e3c026e3d6a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.815924 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bdde8f20-2325-4180-85b9-72a2b2fefe9f","Type":"ContainerStarted","Data":"6728c502e45749fb7df30e2cd1d3d936b66e05d1ff271c0a71f57f64de33cae8"}
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.815936 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc372579-8ed3-41c8-9a89-6e3c026e3d6a-config" (OuterVolumeSpecName: "config") pod "cc372579-8ed3-41c8-9a89-6e3c026e3d6a" (UID: "cc372579-8ed3-41c8-9a89-6e3c026e3d6a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.819044 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-455zq" event={"ID":"df41e02b-ae80-4435-bcd8-df8b9549e73d","Type":"ContainerDied","Data":"0b8abf6515560846144b14d656055e6ed87f2ea8938e98fe06a5f0c5b6354254"}
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.819177 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0b8abf6515560846144b14d656055e6ed87f2ea8938e98fe06a5f0c5b6354254"
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.819243 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-455zq"
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.821641 4792 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cc372579-8ed3-41c8-9a89-6e3c026e3d6a-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.821709 4792 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cc372579-8ed3-41c8-9a89-6e3c026e3d6a-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.821719 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pxh2k\" (UniqueName: \"kubernetes.io/projected/cc372579-8ed3-41c8-9a89-6e3c026e3d6a-kube-api-access-pxh2k\") on node \"crc\" DevicePath \"\""
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.821751 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc372579-8ed3-41c8-9a89-6e3c026e3d6a-config\") on node \"crc\" DevicePath \"\""
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.821760 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cc372579-8ed3-41c8-9a89-6e3c026e3d6a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.828827 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-pc89n" event={"ID":"cc372579-8ed3-41c8-9a89-6e3c026e3d6a","Type":"ContainerDied","Data":"5467788e15fb6445e5aae87990e4fb350b1a270d25b7352000db7b55f5934dc3"}
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.828868 4792 scope.go:117] "RemoveContainer" containerID="135740db455caf8a7a1724cf126d07518a91f072de7c1c85bc49c3753fc8f292"
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.829018 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-pc89n"
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.831057 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc372579-8ed3-41c8-9a89-6e3c026e3d6a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "cc372579-8ed3-41c8-9a89-6e3c026e3d6a" (UID: "cc372579-8ed3-41c8-9a89-6e3c026e3d6a"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.876236 4792 scope.go:117] "RemoveContainer" containerID="51c51df0c3cf78cb79ba7c4fd881c56b3ef2efed97e3ab1001a7877c11fdf5f5"
Dec 02 18:58:42 crc kubenswrapper[4792]: I1202 18:58:42.923733 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cc372579-8ed3-41c8-9a89-6e3c026e3d6a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.230208 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-pc89n"]
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.239106 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-pc89n"]
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.550609 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc372579-8ed3-41c8-9a89-6e3c026e3d6a" path="/var/lib/kubelet/pods/cc372579-8ed3-41c8-9a89-6e3c026e3d6a/volumes"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.676309 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-54b9cc4f54-2nnzj"]
Dec 02 18:58:43 crc kubenswrapper[4792]: E1202 18:58:43.676707 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="477f2280-d198-47f0-8f7a-cd76106d9f35" containerName="barbican-db-sync"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.676719 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="477f2280-d198-47f0-8f7a-cd76106d9f35" containerName="barbican-db-sync"
Dec 02 18:58:43 crc kubenswrapper[4792]: E1202 18:58:43.676731 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="617ad28d-5e59-4407-91e7-740824d3ce43" containerName="placement-db-sync"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.676737 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="617ad28d-5e59-4407-91e7-740824d3ce43" containerName="placement-db-sync"
Dec 02 18:58:43 crc kubenswrapper[4792]: E1202 18:58:43.676755 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc372579-8ed3-41c8-9a89-6e3c026e3d6a" containerName="dnsmasq-dns"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.676763 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc372579-8ed3-41c8-9a89-6e3c026e3d6a" containerName="dnsmasq-dns"
Dec 02 18:58:43 crc kubenswrapper[4792]: E1202 18:58:43.676785 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc372579-8ed3-41c8-9a89-6e3c026e3d6a" containerName="init"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.676791 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc372579-8ed3-41c8-9a89-6e3c026e3d6a" containerName="init"
Dec 02 18:58:43 crc kubenswrapper[4792]: E1202 18:58:43.676808 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df41e02b-ae80-4435-bcd8-df8b9549e73d" containerName="keystone-bootstrap"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.676814 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="df41e02b-ae80-4435-bcd8-df8b9549e73d" containerName="keystone-bootstrap"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.677005 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="477f2280-d198-47f0-8f7a-cd76106d9f35" containerName="barbican-db-sync"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.677027 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="617ad28d-5e59-4407-91e7-740824d3ce43" containerName="placement-db-sync"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.677046 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc372579-8ed3-41c8-9a89-6e3c026e3d6a" containerName="dnsmasq-dns"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.677061 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="df41e02b-ae80-4435-bcd8-df8b9549e73d" containerName="keystone-bootstrap"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.677761 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-54b9cc4f54-2nnzj"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.679880 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.684630 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.684756 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.684879 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.684907 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.685176 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-phdxk"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.751585 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-55bc995c96-sn8lv"]
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.753113 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-55bc995c96-sn8lv"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.783123 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.783389 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.798698 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-p2hl6"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.799007 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.799608 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.822665 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-55bc995c96-sn8lv"]
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.839481 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3baa86cf-f5d1-40e8-90cc-227ecfae98cf-fernet-keys\") pod \"keystone-54b9cc4f54-2nnzj\" (UID: \"3baa86cf-f5d1-40e8-90cc-227ecfae98cf\") " pod="openstack/keystone-54b9cc4f54-2nnzj"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.839583 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a20a6e66-0ccb-41ae-a2ec-904e1dcada7b-logs\") pod \"placement-55bc995c96-sn8lv\" (UID: \"a20a6e66-0ccb-41ae-a2ec-904e1dcada7b\") " pod="openstack/placement-55bc995c96-sn8lv"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.839617 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8bhfd\" (UniqueName: \"kubernetes.io/projected/a20a6e66-0ccb-41ae-a2ec-904e1dcada7b-kube-api-access-8bhfd\") pod \"placement-55bc995c96-sn8lv\" (UID: \"a20a6e66-0ccb-41ae-a2ec-904e1dcada7b\") " pod="openstack/placement-55bc995c96-sn8lv"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.839660 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3baa86cf-f5d1-40e8-90cc-227ecfae98cf-scripts\") pod \"keystone-54b9cc4f54-2nnzj\" (UID: \"3baa86cf-f5d1-40e8-90cc-227ecfae98cf\") " pod="openstack/keystone-54b9cc4f54-2nnzj"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.839696 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w2b9w\" (UniqueName: \"kubernetes.io/projected/3baa86cf-f5d1-40e8-90cc-227ecfae98cf-kube-api-access-w2b9w\") pod \"keystone-54b9cc4f54-2nnzj\" (UID: \"3baa86cf-f5d1-40e8-90cc-227ecfae98cf\") " pod="openstack/keystone-54b9cc4f54-2nnzj"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.839723 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a20a6e66-0ccb-41ae-a2ec-904e1dcada7b-scripts\") pod \"placement-55bc995c96-sn8lv\" (UID: \"a20a6e66-0ccb-41ae-a2ec-904e1dcada7b\") " pod="openstack/placement-55bc995c96-sn8lv"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.839748 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3baa86cf-f5d1-40e8-90cc-227ecfae98cf-config-data\") pod \"keystone-54b9cc4f54-2nnzj\" (UID: \"3baa86cf-f5d1-40e8-90cc-227ecfae98cf\") " pod="openstack/keystone-54b9cc4f54-2nnzj"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.839786 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3baa86cf-f5d1-40e8-90cc-227ecfae98cf-credential-keys\") pod \"keystone-54b9cc4f54-2nnzj\" (UID: \"3baa86cf-f5d1-40e8-90cc-227ecfae98cf\") " pod="openstack/keystone-54b9cc4f54-2nnzj"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.839820 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a20a6e66-0ccb-41ae-a2ec-904e1dcada7b-public-tls-certs\") pod \"placement-55bc995c96-sn8lv\" (UID: \"a20a6e66-0ccb-41ae-a2ec-904e1dcada7b\") " pod="openstack/placement-55bc995c96-sn8lv"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.839925 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a20a6e66-0ccb-41ae-a2ec-904e1dcada7b-config-data\") pod \"placement-55bc995c96-sn8lv\" (UID: \"a20a6e66-0ccb-41ae-a2ec-904e1dcada7b\") " pod="openstack/placement-55bc995c96-sn8lv"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.839970 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3baa86cf-f5d1-40e8-90cc-227ecfae98cf-internal-tls-certs\") pod \"keystone-54b9cc4f54-2nnzj\" (UID: \"3baa86cf-f5d1-40e8-90cc-227ecfae98cf\") " pod="openstack/keystone-54b9cc4f54-2nnzj"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.840006 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3baa86cf-f5d1-40e8-90cc-227ecfae98cf-public-tls-certs\") pod \"keystone-54b9cc4f54-2nnzj\" (UID: \"3baa86cf-f5d1-40e8-90cc-227ecfae98cf\") " pod="openstack/keystone-54b9cc4f54-2nnzj"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.840030 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a20a6e66-0ccb-41ae-a2ec-904e1dcada7b-internal-tls-certs\") pod \"placement-55bc995c96-sn8lv\" (UID: \"a20a6e66-0ccb-41ae-a2ec-904e1dcada7b\") " pod="openstack/placement-55bc995c96-sn8lv"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.840103 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3baa86cf-f5d1-40e8-90cc-227ecfae98cf-combined-ca-bundle\") pod \"keystone-54b9cc4f54-2nnzj\" (UID: \"3baa86cf-f5d1-40e8-90cc-227ecfae98cf\") " pod="openstack/keystone-54b9cc4f54-2nnzj"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.840127 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a20a6e66-0ccb-41ae-a2ec-904e1dcada7b-combined-ca-bundle\") pod \"placement-55bc995c96-sn8lv\" (UID: \"a20a6e66-0ccb-41ae-a2ec-904e1dcada7b\") " pod="openstack/placement-55bc995c96-sn8lv"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.851502 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-54b9cc4f54-2nnzj"]
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.879084 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-bdff8d974-fdcc5"]
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.881552 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-bdff8d974-fdcc5"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.883545 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-t5xgj" event={"ID":"50f2a0fa-fcf2-4f6c-be51-b78ae811fce5","Type":"ContainerStarted","Data":"c54b987c8c922ff0e0f587c87930474283cf03e99ed0e59de82bbccb35f85fed"}
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.884659 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-vntnf"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.884847 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.892332 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.897435 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-6d85ccb45-9bkkd"]
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.898937 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-6d85ccb45-9bkkd"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.920652 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.940097 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-6d85ccb45-9bkkd"]
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.944643 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3baa86cf-f5d1-40e8-90cc-227ecfae98cf-public-tls-certs\") pod \"keystone-54b9cc4f54-2nnzj\" (UID: \"3baa86cf-f5d1-40e8-90cc-227ecfae98cf\") " pod="openstack/keystone-54b9cc4f54-2nnzj"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.944683 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a20a6e66-0ccb-41ae-a2ec-904e1dcada7b-internal-tls-certs\") pod \"placement-55bc995c96-sn8lv\" (UID: \"a20a6e66-0ccb-41ae-a2ec-904e1dcada7b\") " pod="openstack/placement-55bc995c96-sn8lv"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.944731 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3baa86cf-f5d1-40e8-90cc-227ecfae98cf-combined-ca-bundle\") pod \"keystone-54b9cc4f54-2nnzj\" (UID: \"3baa86cf-f5d1-40e8-90cc-227ecfae98cf\") " pod="openstack/keystone-54b9cc4f54-2nnzj"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.944748 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a20a6e66-0ccb-41ae-a2ec-904e1dcada7b-combined-ca-bundle\") pod \"placement-55bc995c96-sn8lv\" (UID: \"a20a6e66-0ccb-41ae-a2ec-904e1dcada7b\") " pod="openstack/placement-55bc995c96-sn8lv"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.944778 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3baa86cf-f5d1-40e8-90cc-227ecfae98cf-fernet-keys\") pod \"keystone-54b9cc4f54-2nnzj\" (UID: \"3baa86cf-f5d1-40e8-90cc-227ecfae98cf\") " pod="openstack/keystone-54b9cc4f54-2nnzj"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.944812 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a20a6e66-0ccb-41ae-a2ec-904e1dcada7b-logs\") pod \"placement-55bc995c96-sn8lv\" (UID: \"a20a6e66-0ccb-41ae-a2ec-904e1dcada7b\") " pod="openstack/placement-55bc995c96-sn8lv"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.944834 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8bhfd\" (UniqueName: \"kubernetes.io/projected/a20a6e66-0ccb-41ae-a2ec-904e1dcada7b-kube-api-access-8bhfd\") pod \"placement-55bc995c96-sn8lv\" (UID: \"a20a6e66-0ccb-41ae-a2ec-904e1dcada7b\") " pod="openstack/placement-55bc995c96-sn8lv"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.944859 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3baa86cf-f5d1-40e8-90cc-227ecfae98cf-scripts\") pod \"keystone-54b9cc4f54-2nnzj\" (UID: \"3baa86cf-f5d1-40e8-90cc-227ecfae98cf\") " pod="openstack/keystone-54b9cc4f54-2nnzj"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.944885 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w2b9w\" (UniqueName: \"kubernetes.io/projected/3baa86cf-f5d1-40e8-90cc-227ecfae98cf-kube-api-access-w2b9w\") pod \"keystone-54b9cc4f54-2nnzj\" (UID: \"3baa86cf-f5d1-40e8-90cc-227ecfae98cf\") " pod="openstack/keystone-54b9cc4f54-2nnzj"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.944906 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a20a6e66-0ccb-41ae-a2ec-904e1dcada7b-scripts\") pod \"placement-55bc995c96-sn8lv\" (UID: \"a20a6e66-0ccb-41ae-a2ec-904e1dcada7b\") " pod="openstack/placement-55bc995c96-sn8lv"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.944924 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3baa86cf-f5d1-40e8-90cc-227ecfae98cf-config-data\") pod \"keystone-54b9cc4f54-2nnzj\" (UID: \"3baa86cf-f5d1-40e8-90cc-227ecfae98cf\") " pod="openstack/keystone-54b9cc4f54-2nnzj"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.944949 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3baa86cf-f5d1-40e8-90cc-227ecfae98cf-credential-keys\") pod \"keystone-54b9cc4f54-2nnzj\" (UID: \"3baa86cf-f5d1-40e8-90cc-227ecfae98cf\") " pod="openstack/keystone-54b9cc4f54-2nnzj"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.944970 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a20a6e66-0ccb-41ae-a2ec-904e1dcada7b-public-tls-certs\") pod \"placement-55bc995c96-sn8lv\" (UID: \"a20a6e66-0ccb-41ae-a2ec-904e1dcada7b\") " pod="openstack/placement-55bc995c96-sn8lv"
Dec 02 18:58:43 crc
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.944997 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a20a6e66-0ccb-41ae-a2ec-904e1dcada7b-config-data\") pod \"placement-55bc995c96-sn8lv\" (UID: \"a20a6e66-0ccb-41ae-a2ec-904e1dcada7b\") " pod="openstack/placement-55bc995c96-sn8lv"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.945020 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3baa86cf-f5d1-40e8-90cc-227ecfae98cf-internal-tls-certs\") pod \"keystone-54b9cc4f54-2nnzj\" (UID: \"3baa86cf-f5d1-40e8-90cc-227ecfae98cf\") " pod="openstack/keystone-54b9cc4f54-2nnzj"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.983233 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-bdff8d974-fdcc5"]
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.983769 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-t5xgj" podStartSLOduration=3.382735511 podStartE2EDuration="50.983760091s" podCreationTimestamp="2025-12-02 18:57:53 +0000 UTC" firstStartedPulling="2025-12-02 18:57:54.593608321 +0000 UTC m=+1305.366500649" lastFinishedPulling="2025-12-02 18:58:42.194632901 +0000 UTC m=+1352.967525229" observedRunningTime="2025-12-02 18:58:43.953047885 +0000 UTC m=+1354.725940213" watchObservedRunningTime="2025-12-02 18:58:43.983760091 +0000 UTC m=+1354.756652419"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.983995 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3baa86cf-f5d1-40e8-90cc-227ecfae98cf-combined-ca-bundle\") pod \"keystone-54b9cc4f54-2nnzj\" (UID: \"3baa86cf-f5d1-40e8-90cc-227ecfae98cf\") " pod="openstack/keystone-54b9cc4f54-2nnzj"
Dec 02 18:58:43 crc kubenswrapper[4792]: I1202 18:58:43.994234 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a20a6e66-0ccb-41ae-a2ec-904e1dcada7b-internal-tls-certs\") pod \"placement-55bc995c96-sn8lv\" (UID: \"a20a6e66-0ccb-41ae-a2ec-904e1dcada7b\") " pod="openstack/placement-55bc995c96-sn8lv"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:43.997718 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3baa86cf-f5d1-40e8-90cc-227ecfae98cf-fernet-keys\") pod \"keystone-54b9cc4f54-2nnzj\" (UID: \"3baa86cf-f5d1-40e8-90cc-227ecfae98cf\") " pod="openstack/keystone-54b9cc4f54-2nnzj"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:43.997994 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a20a6e66-0ccb-41ae-a2ec-904e1dcada7b-logs\") pod \"placement-55bc995c96-sn8lv\" (UID: \"a20a6e66-0ccb-41ae-a2ec-904e1dcada7b\") " pod="openstack/placement-55bc995c96-sn8lv"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:43.998324 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3baa86cf-f5d1-40e8-90cc-227ecfae98cf-public-tls-certs\") pod \"keystone-54b9cc4f54-2nnzj\" (UID: \"3baa86cf-f5d1-40e8-90cc-227ecfae98cf\") " pod="openstack/keystone-54b9cc4f54-2nnzj"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.000194 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a20a6e66-0ccb-41ae-a2ec-904e1dcada7b-combined-ca-bundle\") pod \"placement-55bc995c96-sn8lv\" (UID: \"a20a6e66-0ccb-41ae-a2ec-904e1dcada7b\") " pod="openstack/placement-55bc995c96-sn8lv"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.001910 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8bhfd\" (UniqueName: \"kubernetes.io/projected/a20a6e66-0ccb-41ae-a2ec-904e1dcada7b-kube-api-access-8bhfd\") pod \"placement-55bc995c96-sn8lv\" (UID: \"a20a6e66-0ccb-41ae-a2ec-904e1dcada7b\") " pod="openstack/placement-55bc995c96-sn8lv"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.008625 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3baa86cf-f5d1-40e8-90cc-227ecfae98cf-internal-tls-certs\") pod \"keystone-54b9cc4f54-2nnzj\" (UID: \"3baa86cf-f5d1-40e8-90cc-227ecfae98cf\") " pod="openstack/keystone-54b9cc4f54-2nnzj"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.014858 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a20a6e66-0ccb-41ae-a2ec-904e1dcada7b-public-tls-certs\") pod \"placement-55bc995c96-sn8lv\" (UID: \"a20a6e66-0ccb-41ae-a2ec-904e1dcada7b\") " pod="openstack/placement-55bc995c96-sn8lv"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.015865 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3baa86cf-f5d1-40e8-90cc-227ecfae98cf-config-data\") pod \"keystone-54b9cc4f54-2nnzj\" (UID: \"3baa86cf-f5d1-40e8-90cc-227ecfae98cf\") " pod="openstack/keystone-54b9cc4f54-2nnzj"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.016549 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a20a6e66-0ccb-41ae-a2ec-904e1dcada7b-config-data\") pod \"placement-55bc995c96-sn8lv\" (UID: \"a20a6e66-0ccb-41ae-a2ec-904e1dcada7b\") " pod="openstack/placement-55bc995c96-sn8lv"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.019208 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3baa86cf-f5d1-40e8-90cc-227ecfae98cf-scripts\") pod \"keystone-54b9cc4f54-2nnzj\" (UID: \"3baa86cf-f5d1-40e8-90cc-227ecfae98cf\") " pod="openstack/keystone-54b9cc4f54-2nnzj"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.034088 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a20a6e66-0ccb-41ae-a2ec-904e1dcada7b-scripts\") pod \"placement-55bc995c96-sn8lv\" (UID: \"a20a6e66-0ccb-41ae-a2ec-904e1dcada7b\") " pod="openstack/placement-55bc995c96-sn8lv"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.034317 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3baa86cf-f5d1-40e8-90cc-227ecfae98cf-credential-keys\") pod \"keystone-54b9cc4f54-2nnzj\" (UID: \"3baa86cf-f5d1-40e8-90cc-227ecfae98cf\") " pod="openstack/keystone-54b9cc4f54-2nnzj"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.046273 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p2hp9\" (UniqueName: \"kubernetes.io/projected/6e40d936-c5d2-4491-b5c5-9794c4fb73b1-kube-api-access-p2hp9\") pod \"barbican-keystone-listener-bdff8d974-fdcc5\" (UID: \"6e40d936-c5d2-4491-b5c5-9794c4fb73b1\") " pod="openstack/barbican-keystone-listener-bdff8d974-fdcc5"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.046417 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbaa6700-f41c-49a4-8593-d0d6ba1a6376-combined-ca-bundle\") pod \"barbican-worker-6d85ccb45-9bkkd\" (UID: \"bbaa6700-f41c-49a4-8593-d0d6ba1a6376\") " pod="openstack/barbican-worker-6d85ccb45-9bkkd"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.046441 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e40d936-c5d2-4491-b5c5-9794c4fb73b1-combined-ca-bundle\") pod \"barbican-keystone-listener-bdff8d974-fdcc5\" (UID: \"6e40d936-c5d2-4491-b5c5-9794c4fb73b1\") " pod="openstack/barbican-keystone-listener-bdff8d974-fdcc5"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.046474 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbaa6700-f41c-49a4-8593-d0d6ba1a6376-config-data\") pod \"barbican-worker-6d85ccb45-9bkkd\" (UID: \"bbaa6700-f41c-49a4-8593-d0d6ba1a6376\") " pod="openstack/barbican-worker-6d85ccb45-9bkkd"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.046510 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e40d936-c5d2-4491-b5c5-9794c4fb73b1-config-data\") pod \"barbican-keystone-listener-bdff8d974-fdcc5\" (UID: \"6e40d936-c5d2-4491-b5c5-9794c4fb73b1\") " pod="openstack/barbican-keystone-listener-bdff8d974-fdcc5"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.046560 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bbaa6700-f41c-49a4-8593-d0d6ba1a6376-logs\") pod \"barbican-worker-6d85ccb45-9bkkd\" (UID: \"bbaa6700-f41c-49a4-8593-d0d6ba1a6376\") " pod="openstack/barbican-worker-6d85ccb45-9bkkd"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.046607 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bbaa6700-f41c-49a4-8593-d0d6ba1a6376-config-data-custom\") pod \"barbican-worker-6d85ccb45-9bkkd\" (UID: \"bbaa6700-f41c-49a4-8593-d0d6ba1a6376\") " pod="openstack/barbican-worker-6d85ccb45-9bkkd"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.046637 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6e40d936-c5d2-4491-b5c5-9794c4fb73b1-config-data-custom\") pod \"barbican-keystone-listener-bdff8d974-fdcc5\" (UID: \"6e40d936-c5d2-4491-b5c5-9794c4fb73b1\") " pod="openstack/barbican-keystone-listener-bdff8d974-fdcc5"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.046660 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e40d936-c5d2-4491-b5c5-9794c4fb73b1-logs\") pod \"barbican-keystone-listener-bdff8d974-fdcc5\" (UID: \"6e40d936-c5d2-4491-b5c5-9794c4fb73b1\") " pod="openstack/barbican-keystone-listener-bdff8d974-fdcc5"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.046678 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v62rk\" (UniqueName: \"kubernetes.io/projected/bbaa6700-f41c-49a4-8593-d0d6ba1a6376-kube-api-access-v62rk\") pod \"barbican-worker-6d85ccb45-9bkkd\" (UID: \"bbaa6700-f41c-49a4-8593-d0d6ba1a6376\") " pod="openstack/barbican-worker-6d85ccb45-9bkkd"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.055165 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w2b9w\" (UniqueName: \"kubernetes.io/projected/3baa86cf-f5d1-40e8-90cc-227ecfae98cf-kube-api-access-w2b9w\") pod \"keystone-54b9cc4f54-2nnzj\" (UID: \"3baa86cf-f5d1-40e8-90cc-227ecfae98cf\") " pod="openstack/keystone-54b9cc4f54-2nnzj"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.063607 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-thpg5"]
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.065185 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-thpg5"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.078223 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-55bc995c96-sn8lv"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.096981 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-thpg5"]
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.158494 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bbaa6700-f41c-49a4-8593-d0d6ba1a6376-config-data-custom\") pod \"barbican-worker-6d85ccb45-9bkkd\" (UID: \"bbaa6700-f41c-49a4-8593-d0d6ba1a6376\") " pod="openstack/barbican-worker-6d85ccb45-9bkkd"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.158555 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6e40d936-c5d2-4491-b5c5-9794c4fb73b1-config-data-custom\") pod \"barbican-keystone-listener-bdff8d974-fdcc5\" (UID: \"6e40d936-c5d2-4491-b5c5-9794c4fb73b1\") " pod="openstack/barbican-keystone-listener-bdff8d974-fdcc5"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.158588 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e40d936-c5d2-4491-b5c5-9794c4fb73b1-logs\") pod \"barbican-keystone-listener-bdff8d974-fdcc5\" (UID: \"6e40d936-c5d2-4491-b5c5-9794c4fb73b1\") " pod="openstack/barbican-keystone-listener-bdff8d974-fdcc5"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.158610 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v62rk\" (UniqueName: \"kubernetes.io/projected/bbaa6700-f41c-49a4-8593-d0d6ba1a6376-kube-api-access-v62rk\") pod \"barbican-worker-6d85ccb45-9bkkd\" (UID: \"bbaa6700-f41c-49a4-8593-d0d6ba1a6376\") " pod="openstack/barbican-worker-6d85ccb45-9bkkd"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.158646 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p2hp9\" (UniqueName: \"kubernetes.io/projected/6e40d936-c5d2-4491-b5c5-9794c4fb73b1-kube-api-access-p2hp9\") pod \"barbican-keystone-listener-bdff8d974-fdcc5\" (UID: \"6e40d936-c5d2-4491-b5c5-9794c4fb73b1\") " pod="openstack/barbican-keystone-listener-bdff8d974-fdcc5"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.158682 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c82ec48d-c75a-4989-8f7d-465a3ce22987-dns-svc\") pod \"dnsmasq-dns-85ff748b95-thpg5\" (UID: \"c82ec48d-c75a-4989-8f7d-465a3ce22987\") " pod="openstack/dnsmasq-dns-85ff748b95-thpg5"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.158725 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c82ec48d-c75a-4989-8f7d-465a3ce22987-ovsdbserver-sb\") pod \"dnsmasq-dns-85ff748b95-thpg5\" (UID: \"c82ec48d-c75a-4989-8f7d-465a3ce22987\") " pod="openstack/dnsmasq-dns-85ff748b95-thpg5"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.158748 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbaa6700-f41c-49a4-8593-d0d6ba1a6376-combined-ca-bundle\") pod \"barbican-worker-6d85ccb45-9bkkd\" (UID: \"bbaa6700-f41c-49a4-8593-d0d6ba1a6376\") " pod="openstack/barbican-worker-6d85ccb45-9bkkd"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.158771 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e40d936-c5d2-4491-b5c5-9794c4fb73b1-combined-ca-bundle\") pod \"barbican-keystone-listener-bdff8d974-fdcc5\" (UID: \"6e40d936-c5d2-4491-b5c5-9794c4fb73b1\") " pod="openstack/barbican-keystone-listener-bdff8d974-fdcc5"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.158792 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbaa6700-f41c-49a4-8593-d0d6ba1a6376-config-data\") pod \"barbican-worker-6d85ccb45-9bkkd\" (UID: \"bbaa6700-f41c-49a4-8593-d0d6ba1a6376\") " pod="openstack/barbican-worker-6d85ccb45-9bkkd"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.158813 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c82ec48d-c75a-4989-8f7d-465a3ce22987-config\") pod \"dnsmasq-dns-85ff748b95-thpg5\" (UID: \"c82ec48d-c75a-4989-8f7d-465a3ce22987\") " pod="openstack/dnsmasq-dns-85ff748b95-thpg5"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.158855 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n954t\" (UniqueName: \"kubernetes.io/projected/c82ec48d-c75a-4989-8f7d-465a3ce22987-kube-api-access-n954t\") pod \"dnsmasq-dns-85ff748b95-thpg5\" (UID: \"c82ec48d-c75a-4989-8f7d-465a3ce22987\") " pod="openstack/dnsmasq-dns-85ff748b95-thpg5"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.158882 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e40d936-c5d2-4491-b5c5-9794c4fb73b1-config-data\") pod \"barbican-keystone-listener-bdff8d974-fdcc5\" (UID: \"6e40d936-c5d2-4491-b5c5-9794c4fb73b1\") " pod="openstack/barbican-keystone-listener-bdff8d974-fdcc5"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.158913 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c82ec48d-c75a-4989-8f7d-465a3ce22987-ovsdbserver-nb\") pod \"dnsmasq-dns-85ff748b95-thpg5\" (UID: \"c82ec48d-c75a-4989-8f7d-465a3ce22987\") " pod="openstack/dnsmasq-dns-85ff748b95-thpg5"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.158928 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bbaa6700-f41c-49a4-8593-d0d6ba1a6376-logs\") pod \"barbican-worker-6d85ccb45-9bkkd\" (UID: \"bbaa6700-f41c-49a4-8593-d0d6ba1a6376\") " pod="openstack/barbican-worker-6d85ccb45-9bkkd"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.158946 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c82ec48d-c75a-4989-8f7d-465a3ce22987-dns-swift-storage-0\") pod \"dnsmasq-dns-85ff748b95-thpg5\" (UID: \"c82ec48d-c75a-4989-8f7d-465a3ce22987\") " pod="openstack/dnsmasq-dns-85ff748b95-thpg5"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.167531 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bbaa6700-f41c-49a4-8593-d0d6ba1a6376-logs\") pod \"barbican-worker-6d85ccb45-9bkkd\" (UID: \"bbaa6700-f41c-49a4-8593-d0d6ba1a6376\") " pod="openstack/barbican-worker-6d85ccb45-9bkkd"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.168138 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbaa6700-f41c-49a4-8593-d0d6ba1a6376-config-data\") pod \"barbican-worker-6d85ccb45-9bkkd\" (UID: \"bbaa6700-f41c-49a4-8593-d0d6ba1a6376\") " pod="openstack/barbican-worker-6d85ccb45-9bkkd"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.168295 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e40d936-c5d2-4491-b5c5-9794c4fb73b1-logs\") pod \"barbican-keystone-listener-bdff8d974-fdcc5\" (UID: \"6e40d936-c5d2-4491-b5c5-9794c4fb73b1\") " pod="openstack/barbican-keystone-listener-bdff8d974-fdcc5"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.229745 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6e40d936-c5d2-4491-b5c5-9794c4fb73b1-config-data-custom\") pod \"barbican-keystone-listener-bdff8d974-fdcc5\" (UID: \"6e40d936-c5d2-4491-b5c5-9794c4fb73b1\") " pod="openstack/barbican-keystone-listener-bdff8d974-fdcc5"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.230284 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bbaa6700-f41c-49a4-8593-d0d6ba1a6376-config-data-custom\") pod \"barbican-worker-6d85ccb45-9bkkd\" (UID: \"bbaa6700-f41c-49a4-8593-d0d6ba1a6376\") " pod="openstack/barbican-worker-6d85ccb45-9bkkd"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.230747 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbaa6700-f41c-49a4-8593-d0d6ba1a6376-combined-ca-bundle\") pod \"barbican-worker-6d85ccb45-9bkkd\" (UID: \"bbaa6700-f41c-49a4-8593-d0d6ba1a6376\") " pod="openstack/barbican-worker-6d85ccb45-9bkkd"
Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.235135 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e40d936-c5d2-4491-b5c5-9794c4fb73b1-combined-ca-bundle\") pod \"barbican-keystone-listener-bdff8d974-fdcc5\" (UID: \"6e40d936-c5d2-4491-b5c5-9794c4fb73b1\") " pod="openstack/barbican-keystone-listener-bdff8d974-fdcc5"
\"6e40d936-c5d2-4491-b5c5-9794c4fb73b1\") " pod="openstack/barbican-keystone-listener-bdff8d974-fdcc5" Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.240252 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e40d936-c5d2-4491-b5c5-9794c4fb73b1-config-data\") pod \"barbican-keystone-listener-bdff8d974-fdcc5\" (UID: \"6e40d936-c5d2-4491-b5c5-9794c4fb73b1\") " pod="openstack/barbican-keystone-listener-bdff8d974-fdcc5" Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.241225 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v62rk\" (UniqueName: \"kubernetes.io/projected/bbaa6700-f41c-49a4-8593-d0d6ba1a6376-kube-api-access-v62rk\") pod \"barbican-worker-6d85ccb45-9bkkd\" (UID: \"bbaa6700-f41c-49a4-8593-d0d6ba1a6376\") " pod="openstack/barbican-worker-6d85ccb45-9bkkd" Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.263114 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p2hp9\" (UniqueName: \"kubernetes.io/projected/6e40d936-c5d2-4491-b5c5-9794c4fb73b1-kube-api-access-p2hp9\") pod \"barbican-keystone-listener-bdff8d974-fdcc5\" (UID: \"6e40d936-c5d2-4491-b5c5-9794c4fb73b1\") " pod="openstack/barbican-keystone-listener-bdff8d974-fdcc5" Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.263156 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c82ec48d-c75a-4989-8f7d-465a3ce22987-config\") pod \"dnsmasq-dns-85ff748b95-thpg5\" (UID: \"c82ec48d-c75a-4989-8f7d-465a3ce22987\") " pod="openstack/dnsmasq-dns-85ff748b95-thpg5" Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.263485 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n954t\" (UniqueName: \"kubernetes.io/projected/c82ec48d-c75a-4989-8f7d-465a3ce22987-kube-api-access-n954t\") pod \"dnsmasq-dns-85ff748b95-thpg5\" (UID: \"c82ec48d-c75a-4989-8f7d-465a3ce22987\") " pod="openstack/dnsmasq-dns-85ff748b95-thpg5" Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.263669 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c82ec48d-c75a-4989-8f7d-465a3ce22987-ovsdbserver-nb\") pod \"dnsmasq-dns-85ff748b95-thpg5\" (UID: \"c82ec48d-c75a-4989-8f7d-465a3ce22987\") " pod="openstack/dnsmasq-dns-85ff748b95-thpg5" Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.263756 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c82ec48d-c75a-4989-8f7d-465a3ce22987-dns-swift-storage-0\") pod \"dnsmasq-dns-85ff748b95-thpg5\" (UID: \"c82ec48d-c75a-4989-8f7d-465a3ce22987\") " pod="openstack/dnsmasq-dns-85ff748b95-thpg5" Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.264027 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c82ec48d-c75a-4989-8f7d-465a3ce22987-dns-svc\") pod \"dnsmasq-dns-85ff748b95-thpg5\" (UID: \"c82ec48d-c75a-4989-8f7d-465a3ce22987\") " pod="openstack/dnsmasq-dns-85ff748b95-thpg5" Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.264221 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c82ec48d-c75a-4989-8f7d-465a3ce22987-ovsdbserver-sb\") pod 
\"dnsmasq-dns-85ff748b95-thpg5\" (UID: \"c82ec48d-c75a-4989-8f7d-465a3ce22987\") " pod="openstack/dnsmasq-dns-85ff748b95-thpg5" Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.264398 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-6d85ccb45-9bkkd" Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.264065 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c82ec48d-c75a-4989-8f7d-465a3ce22987-config\") pod \"dnsmasq-dns-85ff748b95-thpg5\" (UID: \"c82ec48d-c75a-4989-8f7d-465a3ce22987\") " pod="openstack/dnsmasq-dns-85ff748b95-thpg5" Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.276083 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c82ec48d-c75a-4989-8f7d-465a3ce22987-ovsdbserver-sb\") pod \"dnsmasq-dns-85ff748b95-thpg5\" (UID: \"c82ec48d-c75a-4989-8f7d-465a3ce22987\") " pod="openstack/dnsmasq-dns-85ff748b95-thpg5" Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.276393 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c82ec48d-c75a-4989-8f7d-465a3ce22987-dns-svc\") pod \"dnsmasq-dns-85ff748b95-thpg5\" (UID: \"c82ec48d-c75a-4989-8f7d-465a3ce22987\") " pod="openstack/dnsmasq-dns-85ff748b95-thpg5" Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.281935 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c82ec48d-c75a-4989-8f7d-465a3ce22987-ovsdbserver-nb\") pod \"dnsmasq-dns-85ff748b95-thpg5\" (UID: \"c82ec48d-c75a-4989-8f7d-465a3ce22987\") " pod="openstack/dnsmasq-dns-85ff748b95-thpg5" Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.287912 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c82ec48d-c75a-4989-8f7d-465a3ce22987-dns-swift-storage-0\") pod \"dnsmasq-dns-85ff748b95-thpg5\" (UID: \"c82ec48d-c75a-4989-8f7d-465a3ce22987\") " pod="openstack/dnsmasq-dns-85ff748b95-thpg5" Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.316385 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-54b9cc4f54-2nnzj" Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.317918 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n954t\" (UniqueName: \"kubernetes.io/projected/c82ec48d-c75a-4989-8f7d-465a3ce22987-kube-api-access-n954t\") pod \"dnsmasq-dns-85ff748b95-thpg5\" (UID: \"c82ec48d-c75a-4989-8f7d-465a3ce22987\") " pod="openstack/dnsmasq-dns-85ff748b95-thpg5" Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.361655 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-6f7549b9fd-4lplx"] Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.363461 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6f7549b9fd-4lplx" Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.371943 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.416404 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6f7549b9fd-4lplx"] Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.433389 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-thpg5" Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.475829 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7d46db5-e66e-435c-a7dd-c5f8fef782ed-combined-ca-bundle\") pod \"barbican-api-6f7549b9fd-4lplx\" (UID: \"d7d46db5-e66e-435c-a7dd-c5f8fef782ed\") " pod="openstack/barbican-api-6f7549b9fd-4lplx" Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.475875 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d7d46db5-e66e-435c-a7dd-c5f8fef782ed-config-data-custom\") pod \"barbican-api-6f7549b9fd-4lplx\" (UID: \"d7d46db5-e66e-435c-a7dd-c5f8fef782ed\") " pod="openstack/barbican-api-6f7549b9fd-4lplx" Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.475918 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d7d46db5-e66e-435c-a7dd-c5f8fef782ed-logs\") pod \"barbican-api-6f7549b9fd-4lplx\" (UID: \"d7d46db5-e66e-435c-a7dd-c5f8fef782ed\") " pod="openstack/barbican-api-6f7549b9fd-4lplx" Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.475940 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nwz92\" (UniqueName: \"kubernetes.io/projected/d7d46db5-e66e-435c-a7dd-c5f8fef782ed-kube-api-access-nwz92\") pod \"barbican-api-6f7549b9fd-4lplx\" (UID: \"d7d46db5-e66e-435c-a7dd-c5f8fef782ed\") " pod="openstack/barbican-api-6f7549b9fd-4lplx" Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.475979 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7d46db5-e66e-435c-a7dd-c5f8fef782ed-config-data\") pod \"barbican-api-6f7549b9fd-4lplx\" (UID: \"d7d46db5-e66e-435c-a7dd-c5f8fef782ed\") " pod="openstack/barbican-api-6f7549b9fd-4lplx" Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.539312 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-bdff8d974-fdcc5" Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.582605 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d7d46db5-e66e-435c-a7dd-c5f8fef782ed-logs\") pod \"barbican-api-6f7549b9fd-4lplx\" (UID: \"d7d46db5-e66e-435c-a7dd-c5f8fef782ed\") " pod="openstack/barbican-api-6f7549b9fd-4lplx" Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.582650 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nwz92\" (UniqueName: \"kubernetes.io/projected/d7d46db5-e66e-435c-a7dd-c5f8fef782ed-kube-api-access-nwz92\") pod \"barbican-api-6f7549b9fd-4lplx\" (UID: \"d7d46db5-e66e-435c-a7dd-c5f8fef782ed\") " pod="openstack/barbican-api-6f7549b9fd-4lplx" Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.582693 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7d46db5-e66e-435c-a7dd-c5f8fef782ed-config-data\") pod \"barbican-api-6f7549b9fd-4lplx\" (UID: \"d7d46db5-e66e-435c-a7dd-c5f8fef782ed\") " pod="openstack/barbican-api-6f7549b9fd-4lplx" Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.582804 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7d46db5-e66e-435c-a7dd-c5f8fef782ed-combined-ca-bundle\") pod \"barbican-api-6f7549b9fd-4lplx\" (UID: \"d7d46db5-e66e-435c-a7dd-c5f8fef782ed\") " pod="openstack/barbican-api-6f7549b9fd-4lplx" Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.582826 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d7d46db5-e66e-435c-a7dd-c5f8fef782ed-config-data-custom\") pod \"barbican-api-6f7549b9fd-4lplx\" (UID: \"d7d46db5-e66e-435c-a7dd-c5f8fef782ed\") " pod="openstack/barbican-api-6f7549b9fd-4lplx" Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.586900 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d7d46db5-e66e-435c-a7dd-c5f8fef782ed-logs\") pod \"barbican-api-6f7549b9fd-4lplx\" (UID: \"d7d46db5-e66e-435c-a7dd-c5f8fef782ed\") " pod="openstack/barbican-api-6f7549b9fd-4lplx" Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.595299 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7d46db5-e66e-435c-a7dd-c5f8fef782ed-config-data\") pod \"barbican-api-6f7549b9fd-4lplx\" (UID: \"d7d46db5-e66e-435c-a7dd-c5f8fef782ed\") " pod="openstack/barbican-api-6f7549b9fd-4lplx" Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.595844 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7d46db5-e66e-435c-a7dd-c5f8fef782ed-combined-ca-bundle\") pod \"barbican-api-6f7549b9fd-4lplx\" (UID: \"d7d46db5-e66e-435c-a7dd-c5f8fef782ed\") " pod="openstack/barbican-api-6f7549b9fd-4lplx" Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.609936 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d7d46db5-e66e-435c-a7dd-c5f8fef782ed-config-data-custom\") pod \"barbican-api-6f7549b9fd-4lplx\" (UID: \"d7d46db5-e66e-435c-a7dd-c5f8fef782ed\") " pod="openstack/barbican-api-6f7549b9fd-4lplx" Dec 02 
18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.630382 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nwz92\" (UniqueName: \"kubernetes.io/projected/d7d46db5-e66e-435c-a7dd-c5f8fef782ed-kube-api-access-nwz92\") pod \"barbican-api-6f7549b9fd-4lplx\" (UID: \"d7d46db5-e66e-435c-a7dd-c5f8fef782ed\") " pod="openstack/barbican-api-6f7549b9fd-4lplx" Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.723959 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6f7549b9fd-4lplx" Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.724798 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.725235 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.793773 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.794426 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.887513 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-55bc995c96-sn8lv"] Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.901916 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.902111 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.958785 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.958896 4792 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 02 18:58:44 crc kubenswrapper[4792]: I1202 18:58:44.959509 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 02 18:58:45 crc kubenswrapper[4792]: I1202 18:58:45.060157 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-6d85ccb45-9bkkd"] Dec 02 18:58:45 crc kubenswrapper[4792]: I1202 18:58:45.120386 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-thpg5"] Dec 02 18:58:45 crc kubenswrapper[4792]: I1202 18:58:45.309294 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-bdff8d974-fdcc5"] Dec 02 18:58:45 crc kubenswrapper[4792]: I1202 18:58:45.411928 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-54b9cc4f54-2nnzj"] Dec 02 18:58:45 crc kubenswrapper[4792]: W1202 18:58:45.477454 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3baa86cf_f5d1_40e8_90cc_227ecfae98cf.slice/crio-e05f274e9d00a7ddb96f50890b0ee3273126285a237d7b519052b690d9b1fd8c WatchSource:0}: Error finding container e05f274e9d00a7ddb96f50890b0ee3273126285a237d7b519052b690d9b1fd8c: Status 404 returned error can't find the container with id e05f274e9d00a7ddb96f50890b0ee3273126285a237d7b519052b690d9b1fd8c Dec 02 
18:58:45 crc kubenswrapper[4792]: I1202 18:58:45.533969 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6f7549b9fd-4lplx"] Dec 02 18:58:45 crc kubenswrapper[4792]: I1202 18:58:45.938548 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-bdff8d974-fdcc5" event={"ID":"6e40d936-c5d2-4491-b5c5-9794c4fb73b1","Type":"ContainerStarted","Data":"e8c1fe710022ade92a1f831e38d10bba02c1c6566e9a01d0b59ebc6207ba223a"} Dec 02 18:58:45 crc kubenswrapper[4792]: I1202 18:58:45.942213 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6f7549b9fd-4lplx" event={"ID":"d7d46db5-e66e-435c-a7dd-c5f8fef782ed","Type":"ContainerStarted","Data":"b818e53efb9db2b2a04e812768015efad9f577bfa1308cc0d8410299c389fc3a"} Dec 02 18:58:45 crc kubenswrapper[4792]: I1202 18:58:45.945897 4792 generic.go:334] "Generic (PLEG): container finished" podID="c82ec48d-c75a-4989-8f7d-465a3ce22987" containerID="d0c0b241965293ee1fd652dca3b8c24989f6b4924891f7035754fc8cd3815f6b" exitCode=0 Dec 02 18:58:45 crc kubenswrapper[4792]: I1202 18:58:45.945953 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-thpg5" event={"ID":"c82ec48d-c75a-4989-8f7d-465a3ce22987","Type":"ContainerDied","Data":"d0c0b241965293ee1fd652dca3b8c24989f6b4924891f7035754fc8cd3815f6b"} Dec 02 18:58:45 crc kubenswrapper[4792]: I1202 18:58:45.945976 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-thpg5" event={"ID":"c82ec48d-c75a-4989-8f7d-465a3ce22987","Type":"ContainerStarted","Data":"68f1531d33561cafb66e6a6d643bf2d5bfdb491c5830798fefea0277bb7ff7b6"} Dec 02 18:58:45 crc kubenswrapper[4792]: I1202 18:58:45.956323 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6d85ccb45-9bkkd" event={"ID":"bbaa6700-f41c-49a4-8593-d0d6ba1a6376","Type":"ContainerStarted","Data":"aac7481b3687b9aab7c8863be9c567364c776f596e79cb8711d2591a895e65b8"} Dec 02 18:58:45 crc kubenswrapper[4792]: I1202 18:58:45.958255 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-54b9cc4f54-2nnzj" event={"ID":"3baa86cf-f5d1-40e8-90cc-227ecfae98cf","Type":"ContainerStarted","Data":"e05f274e9d00a7ddb96f50890b0ee3273126285a237d7b519052b690d9b1fd8c"} Dec 02 18:58:45 crc kubenswrapper[4792]: I1202 18:58:45.969980 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-55bc995c96-sn8lv" event={"ID":"a20a6e66-0ccb-41ae-a2ec-904e1dcada7b","Type":"ContainerStarted","Data":"f0652d4792b24bf07a3de2ff0b7942bd12b1f343ceb9eea4976c9876e67a85dc"} Dec 02 18:58:45 crc kubenswrapper[4792]: I1202 18:58:45.970052 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-55bc995c96-sn8lv" event={"ID":"a20a6e66-0ccb-41ae-a2ec-904e1dcada7b","Type":"ContainerStarted","Data":"3b7bb53423499406fbeec7933b2371466c0c462749b1571b7302a03850cbc773"} Dec 02 18:58:47 crc kubenswrapper[4792]: I1202 18:58:47.006166 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6f7549b9fd-4lplx" event={"ID":"d7d46db5-e66e-435c-a7dd-c5f8fef782ed","Type":"ContainerStarted","Data":"1bf757f8b2bb9bf3c37111cdab1a1636c3770eb057a7b7552da4b2de241868df"} Dec 02 18:58:47 crc kubenswrapper[4792]: I1202 18:58:47.006671 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6f7549b9fd-4lplx" 
event={"ID":"d7d46db5-e66e-435c-a7dd-c5f8fef782ed","Type":"ContainerStarted","Data":"577fb68ddf3e7f340fcdfa2a0caffec048e6c09a356e2dbb1a5cd4c3ea1ab179"} Dec 02 18:58:47 crc kubenswrapper[4792]: I1202 18:58:47.007870 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6f7549b9fd-4lplx" Dec 02 18:58:47 crc kubenswrapper[4792]: I1202 18:58:47.007894 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6f7549b9fd-4lplx" Dec 02 18:58:47 crc kubenswrapper[4792]: I1202 18:58:47.014294 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-thpg5" event={"ID":"c82ec48d-c75a-4989-8f7d-465a3ce22987","Type":"ContainerStarted","Data":"4ebd4c5eae3cd1008c5faeddf802cd394ee1b79c5a302fcf65b326a67908921e"} Dec 02 18:58:47 crc kubenswrapper[4792]: I1202 18:58:47.014720 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-85ff748b95-thpg5" Dec 02 18:58:47 crc kubenswrapper[4792]: I1202 18:58:47.025233 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-54b9cc4f54-2nnzj" event={"ID":"3baa86cf-f5d1-40e8-90cc-227ecfae98cf","Type":"ContainerStarted","Data":"8675b65047bf302299f3b12b95d94fefef0751f53b197ea112bcfe874d4842be"} Dec 02 18:58:47 crc kubenswrapper[4792]: I1202 18:58:47.027866 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-54b9cc4f54-2nnzj" Dec 02 18:58:47 crc kubenswrapper[4792]: I1202 18:58:47.033509 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-55bc995c96-sn8lv" event={"ID":"a20a6e66-0ccb-41ae-a2ec-904e1dcada7b","Type":"ContainerStarted","Data":"4c816e9acf0288db1ccece1f3716f9adbbb2f0c16d33dc97c51b96bd7538e24c"} Dec 02 18:58:47 crc kubenswrapper[4792]: I1202 18:58:47.034974 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-55bc995c96-sn8lv" Dec 02 18:58:47 crc kubenswrapper[4792]: I1202 18:58:47.035005 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-55bc995c96-sn8lv" Dec 02 18:58:47 crc kubenswrapper[4792]: I1202 18:58:47.060635 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-85ff748b95-thpg5" podStartSLOduration=4.060613368 podStartE2EDuration="4.060613368s" podCreationTimestamp="2025-12-02 18:58:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:58:47.054868639 +0000 UTC m=+1357.827760967" watchObservedRunningTime="2025-12-02 18:58:47.060613368 +0000 UTC m=+1357.833505716" Dec 02 18:58:47 crc kubenswrapper[4792]: I1202 18:58:47.060886 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-6f7549b9fd-4lplx" podStartSLOduration=3.060878435 podStartE2EDuration="3.060878435s" podCreationTimestamp="2025-12-02 18:58:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:58:47.022136541 +0000 UTC m=+1357.795028869" watchObservedRunningTime="2025-12-02 18:58:47.060878435 +0000 UTC m=+1357.833770773" Dec 02 18:58:47 crc kubenswrapper[4792]: I1202 18:58:47.083062 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-55bc995c96-sn8lv" podStartSLOduration=4.083046669 podStartE2EDuration="4.083046669s" 
podCreationTimestamp="2025-12-02 18:58:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:58:47.07998461 +0000 UTC m=+1357.852876958" watchObservedRunningTime="2025-12-02 18:58:47.083046669 +0000 UTC m=+1357.855938997" Dec 02 18:58:47 crc kubenswrapper[4792]: I1202 18:58:47.098071 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-54b9cc4f54-2nnzj" podStartSLOduration=4.098056278 podStartE2EDuration="4.098056278s" podCreationTimestamp="2025-12-02 18:58:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:58:47.096036186 +0000 UTC m=+1357.868928504" watchObservedRunningTime="2025-12-02 18:58:47.098056278 +0000 UTC m=+1357.870948596" Dec 02 18:58:47 crc kubenswrapper[4792]: I1202 18:58:47.446656 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-75d777bcc8-l485p"] Dec 02 18:58:47 crc kubenswrapper[4792]: I1202 18:58:47.448863 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-75d777bcc8-l485p" Dec 02 18:58:47 crc kubenswrapper[4792]: I1202 18:58:47.451025 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Dec 02 18:58:47 crc kubenswrapper[4792]: I1202 18:58:47.453753 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Dec 02 18:58:47 crc kubenswrapper[4792]: I1202 18:58:47.462866 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-75d777bcc8-l485p"] Dec 02 18:58:47 crc kubenswrapper[4792]: I1202 18:58:47.588119 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/260c73f4-d8d2-4178-924a-81703068a4f6-config-data-custom\") pod \"barbican-api-75d777bcc8-l485p\" (UID: \"260c73f4-d8d2-4178-924a-81703068a4f6\") " pod="openstack/barbican-api-75d777bcc8-l485p" Dec 02 18:58:47 crc kubenswrapper[4792]: I1202 18:58:47.588432 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/260c73f4-d8d2-4178-924a-81703068a4f6-internal-tls-certs\") pod \"barbican-api-75d777bcc8-l485p\" (UID: \"260c73f4-d8d2-4178-924a-81703068a4f6\") " pod="openstack/barbican-api-75d777bcc8-l485p" Dec 02 18:58:47 crc kubenswrapper[4792]: I1202 18:58:47.588567 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/260c73f4-d8d2-4178-924a-81703068a4f6-logs\") pod \"barbican-api-75d777bcc8-l485p\" (UID: \"260c73f4-d8d2-4178-924a-81703068a4f6\") " pod="openstack/barbican-api-75d777bcc8-l485p" Dec 02 18:58:47 crc kubenswrapper[4792]: I1202 18:58:47.588721 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/260c73f4-d8d2-4178-924a-81703068a4f6-public-tls-certs\") pod \"barbican-api-75d777bcc8-l485p\" (UID: \"260c73f4-d8d2-4178-924a-81703068a4f6\") " pod="openstack/barbican-api-75d777bcc8-l485p" Dec 02 18:58:47 crc kubenswrapper[4792]: I1202 18:58:47.589025 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/260c73f4-d8d2-4178-924a-81703068a4f6-combined-ca-bundle\") pod \"barbican-api-75d777bcc8-l485p\" (UID: \"260c73f4-d8d2-4178-924a-81703068a4f6\") " pod="openstack/barbican-api-75d777bcc8-l485p" Dec 02 18:58:47 crc kubenswrapper[4792]: I1202 18:58:47.589124 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4p72c\" (UniqueName: \"kubernetes.io/projected/260c73f4-d8d2-4178-924a-81703068a4f6-kube-api-access-4p72c\") pod \"barbican-api-75d777bcc8-l485p\" (UID: \"260c73f4-d8d2-4178-924a-81703068a4f6\") " pod="openstack/barbican-api-75d777bcc8-l485p" Dec 02 18:58:47 crc kubenswrapper[4792]: I1202 18:58:47.589327 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/260c73f4-d8d2-4178-924a-81703068a4f6-config-data\") pod \"barbican-api-75d777bcc8-l485p\" (UID: \"260c73f4-d8d2-4178-924a-81703068a4f6\") " pod="openstack/barbican-api-75d777bcc8-l485p" Dec 02 18:58:47 crc kubenswrapper[4792]: I1202 18:58:47.691015 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4p72c\" (UniqueName: \"kubernetes.io/projected/260c73f4-d8d2-4178-924a-81703068a4f6-kube-api-access-4p72c\") pod \"barbican-api-75d777bcc8-l485p\" (UID: \"260c73f4-d8d2-4178-924a-81703068a4f6\") " pod="openstack/barbican-api-75d777bcc8-l485p" Dec 02 18:58:47 crc kubenswrapper[4792]: I1202 18:58:47.691389 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/260c73f4-d8d2-4178-924a-81703068a4f6-config-data\") pod \"barbican-api-75d777bcc8-l485p\" (UID: \"260c73f4-d8d2-4178-924a-81703068a4f6\") " pod="openstack/barbican-api-75d777bcc8-l485p" Dec 02 18:58:47 crc kubenswrapper[4792]: I1202 18:58:47.691422 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/260c73f4-d8d2-4178-924a-81703068a4f6-config-data-custom\") pod \"barbican-api-75d777bcc8-l485p\" (UID: \"260c73f4-d8d2-4178-924a-81703068a4f6\") " pod="openstack/barbican-api-75d777bcc8-l485p" Dec 02 18:58:47 crc kubenswrapper[4792]: I1202 18:58:47.691445 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/260c73f4-d8d2-4178-924a-81703068a4f6-internal-tls-certs\") pod \"barbican-api-75d777bcc8-l485p\" (UID: \"260c73f4-d8d2-4178-924a-81703068a4f6\") " pod="openstack/barbican-api-75d777bcc8-l485p" Dec 02 18:58:47 crc kubenswrapper[4792]: I1202 18:58:47.691476 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/260c73f4-d8d2-4178-924a-81703068a4f6-logs\") pod \"barbican-api-75d777bcc8-l485p\" (UID: \"260c73f4-d8d2-4178-924a-81703068a4f6\") " pod="openstack/barbican-api-75d777bcc8-l485p" Dec 02 18:58:47 crc kubenswrapper[4792]: I1202 18:58:47.692191 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/260c73f4-d8d2-4178-924a-81703068a4f6-logs\") pod \"barbican-api-75d777bcc8-l485p\" (UID: \"260c73f4-d8d2-4178-924a-81703068a4f6\") " pod="openstack/barbican-api-75d777bcc8-l485p" Dec 02 18:58:47 crc kubenswrapper[4792]: I1202 18:58:47.691704 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" 
(UniqueName: \"kubernetes.io/secret/260c73f4-d8d2-4178-924a-81703068a4f6-public-tls-certs\") pod \"barbican-api-75d777bcc8-l485p\" (UID: \"260c73f4-d8d2-4178-924a-81703068a4f6\") " pod="openstack/barbican-api-75d777bcc8-l485p" Dec 02 18:58:47 crc kubenswrapper[4792]: I1202 18:58:47.693056 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/260c73f4-d8d2-4178-924a-81703068a4f6-combined-ca-bundle\") pod \"barbican-api-75d777bcc8-l485p\" (UID: \"260c73f4-d8d2-4178-924a-81703068a4f6\") " pod="openstack/barbican-api-75d777bcc8-l485p" Dec 02 18:58:47 crc kubenswrapper[4792]: I1202 18:58:47.697249 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/260c73f4-d8d2-4178-924a-81703068a4f6-public-tls-certs\") pod \"barbican-api-75d777bcc8-l485p\" (UID: \"260c73f4-d8d2-4178-924a-81703068a4f6\") " pod="openstack/barbican-api-75d777bcc8-l485p" Dec 02 18:58:47 crc kubenswrapper[4792]: I1202 18:58:47.700230 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/260c73f4-d8d2-4178-924a-81703068a4f6-config-data-custom\") pod \"barbican-api-75d777bcc8-l485p\" (UID: \"260c73f4-d8d2-4178-924a-81703068a4f6\") " pod="openstack/barbican-api-75d777bcc8-l485p" Dec 02 18:58:47 crc kubenswrapper[4792]: I1202 18:58:47.701615 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/260c73f4-d8d2-4178-924a-81703068a4f6-config-data\") pod \"barbican-api-75d777bcc8-l485p\" (UID: \"260c73f4-d8d2-4178-924a-81703068a4f6\") " pod="openstack/barbican-api-75d777bcc8-l485p" Dec 02 18:58:47 crc kubenswrapper[4792]: I1202 18:58:47.709098 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/260c73f4-d8d2-4178-924a-81703068a4f6-internal-tls-certs\") pod \"barbican-api-75d777bcc8-l485p\" (UID: \"260c73f4-d8d2-4178-924a-81703068a4f6\") " pod="openstack/barbican-api-75d777bcc8-l485p" Dec 02 18:58:47 crc kubenswrapper[4792]: I1202 18:58:47.709584 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4p72c\" (UniqueName: \"kubernetes.io/projected/260c73f4-d8d2-4178-924a-81703068a4f6-kube-api-access-4p72c\") pod \"barbican-api-75d777bcc8-l485p\" (UID: \"260c73f4-d8d2-4178-924a-81703068a4f6\") " pod="openstack/barbican-api-75d777bcc8-l485p" Dec 02 18:58:47 crc kubenswrapper[4792]: I1202 18:58:47.715718 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/260c73f4-d8d2-4178-924a-81703068a4f6-combined-ca-bundle\") pod \"barbican-api-75d777bcc8-l485p\" (UID: \"260c73f4-d8d2-4178-924a-81703068a4f6\") " pod="openstack/barbican-api-75d777bcc8-l485p" Dec 02 18:58:47 crc kubenswrapper[4792]: I1202 18:58:47.725225 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 02 18:58:47 crc kubenswrapper[4792]: I1202 18:58:47.725364 4792 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 02 18:58:47 crc kubenswrapper[4792]: I1202 18:58:47.726069 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 02 18:58:47 crc kubenswrapper[4792]: I1202 18:58:47.775744 4792 util.go:30] "No sandbox for pod can 
be found. Need to start a new one" pod="openstack/barbican-api-75d777bcc8-l485p" Dec 02 18:58:48 crc kubenswrapper[4792]: I1202 18:58:48.919188 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-75d777bcc8-l485p"] Dec 02 18:58:50 crc kubenswrapper[4792]: I1202 18:58:50.073051 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-75d777bcc8-l485p" event={"ID":"260c73f4-d8d2-4178-924a-81703068a4f6","Type":"ContainerStarted","Data":"b8d74efde30364d6a3280bc71e8bb39cd1f67a208c6cde532aca7f92ac63242c"} Dec 02 18:58:52 crc kubenswrapper[4792]: I1202 18:58:52.093777 4792 generic.go:334] "Generic (PLEG): container finished" podID="50f2a0fa-fcf2-4f6c-be51-b78ae811fce5" containerID="c54b987c8c922ff0e0f587c87930474283cf03e99ed0e59de82bbccb35f85fed" exitCode=0 Dec 02 18:58:52 crc kubenswrapper[4792]: I1202 18:58:52.094135 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-t5xgj" event={"ID":"50f2a0fa-fcf2-4f6c-be51-b78ae811fce5","Type":"ContainerDied","Data":"c54b987c8c922ff0e0f587c87930474283cf03e99ed0e59de82bbccb35f85fed"} Dec 02 18:58:54 crc kubenswrapper[4792]: I1202 18:58:54.437660 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-85ff748b95-thpg5" Dec 02 18:58:54 crc kubenswrapper[4792]: I1202 18:58:54.499224 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-4d5bj"] Dec 02 18:58:54 crc kubenswrapper[4792]: I1202 18:58:54.499702 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-55f844cf75-4d5bj" podUID="37e3266a-8a6c-4472-9bcd-459157fba9c8" containerName="dnsmasq-dns" containerID="cri-o://1340723c8cb786b6643bcc1d4da66b7941aa1f104c91ba38db06f74153d8b11f" gracePeriod=10 Dec 02 18:58:55 crc kubenswrapper[4792]: I1202 18:58:55.125581 4792 generic.go:334] "Generic (PLEG): container finished" podID="37e3266a-8a6c-4472-9bcd-459157fba9c8" containerID="1340723c8cb786b6643bcc1d4da66b7941aa1f104c91ba38db06f74153d8b11f" exitCode=0 Dec 02 18:58:55 crc kubenswrapper[4792]: I1202 18:58:55.125631 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-4d5bj" event={"ID":"37e3266a-8a6c-4472-9bcd-459157fba9c8","Type":"ContainerDied","Data":"1340723c8cb786b6643bcc1d4da66b7941aa1f104c91ba38db06f74153d8b11f"} Dec 02 18:58:55 crc kubenswrapper[4792]: I1202 18:58:55.596756 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-t5xgj" Dec 02 18:58:55 crc kubenswrapper[4792]: I1202 18:58:55.682950 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/50f2a0fa-fcf2-4f6c-be51-b78ae811fce5-db-sync-config-data\") pod \"50f2a0fa-fcf2-4f6c-be51-b78ae811fce5\" (UID: \"50f2a0fa-fcf2-4f6c-be51-b78ae811fce5\") " Dec 02 18:58:55 crc kubenswrapper[4792]: I1202 18:58:55.683071 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/50f2a0fa-fcf2-4f6c-be51-b78ae811fce5-etc-machine-id\") pod \"50f2a0fa-fcf2-4f6c-be51-b78ae811fce5\" (UID: \"50f2a0fa-fcf2-4f6c-be51-b78ae811fce5\") " Dec 02 18:58:55 crc kubenswrapper[4792]: I1202 18:58:55.683138 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50f2a0fa-fcf2-4f6c-be51-b78ae811fce5-combined-ca-bundle\") pod \"50f2a0fa-fcf2-4f6c-be51-b78ae811fce5\" (UID: \"50f2a0fa-fcf2-4f6c-be51-b78ae811fce5\") " Dec 02 18:58:55 crc kubenswrapper[4792]: I1202 18:58:55.683213 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50f2a0fa-fcf2-4f6c-be51-b78ae811fce5-config-data\") pod \"50f2a0fa-fcf2-4f6c-be51-b78ae811fce5\" (UID: \"50f2a0fa-fcf2-4f6c-be51-b78ae811fce5\") " Dec 02 18:58:55 crc kubenswrapper[4792]: I1202 18:58:55.683385 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50f2a0fa-fcf2-4f6c-be51-b78ae811fce5-scripts\") pod \"50f2a0fa-fcf2-4f6c-be51-b78ae811fce5\" (UID: \"50f2a0fa-fcf2-4f6c-be51-b78ae811fce5\") " Dec 02 18:58:55 crc kubenswrapper[4792]: I1202 18:58:55.683458 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wwqvz\" (UniqueName: \"kubernetes.io/projected/50f2a0fa-fcf2-4f6c-be51-b78ae811fce5-kube-api-access-wwqvz\") pod \"50f2a0fa-fcf2-4f6c-be51-b78ae811fce5\" (UID: \"50f2a0fa-fcf2-4f6c-be51-b78ae811fce5\") " Dec 02 18:58:55 crc kubenswrapper[4792]: I1202 18:58:55.692226 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50f2a0fa-fcf2-4f6c-be51-b78ae811fce5-kube-api-access-wwqvz" (OuterVolumeSpecName: "kube-api-access-wwqvz") pod "50f2a0fa-fcf2-4f6c-be51-b78ae811fce5" (UID: "50f2a0fa-fcf2-4f6c-be51-b78ae811fce5"). InnerVolumeSpecName "kube-api-access-wwqvz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:58:55 crc kubenswrapper[4792]: I1202 18:58:55.693115 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50f2a0fa-fcf2-4f6c-be51-b78ae811fce5-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "50f2a0fa-fcf2-4f6c-be51-b78ae811fce5" (UID: "50f2a0fa-fcf2-4f6c-be51-b78ae811fce5"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:58:55 crc kubenswrapper[4792]: I1202 18:58:55.693372 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/50f2a0fa-fcf2-4f6c-be51-b78ae811fce5-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "50f2a0fa-fcf2-4f6c-be51-b78ae811fce5" (UID: "50f2a0fa-fcf2-4f6c-be51-b78ae811fce5"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 18:58:55 crc kubenswrapper[4792]: I1202 18:58:55.718051 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50f2a0fa-fcf2-4f6c-be51-b78ae811fce5-scripts" (OuterVolumeSpecName: "scripts") pod "50f2a0fa-fcf2-4f6c-be51-b78ae811fce5" (UID: "50f2a0fa-fcf2-4f6c-be51-b78ae811fce5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:58:55 crc kubenswrapper[4792]: I1202 18:58:55.768938 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50f2a0fa-fcf2-4f6c-be51-b78ae811fce5-config-data" (OuterVolumeSpecName: "config-data") pod "50f2a0fa-fcf2-4f6c-be51-b78ae811fce5" (UID: "50f2a0fa-fcf2-4f6c-be51-b78ae811fce5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:58:55 crc kubenswrapper[4792]: I1202 18:58:55.785500 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50f2a0fa-fcf2-4f6c-be51-b78ae811fce5-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 18:58:55 crc kubenswrapper[4792]: I1202 18:58:55.785537 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wwqvz\" (UniqueName: \"kubernetes.io/projected/50f2a0fa-fcf2-4f6c-be51-b78ae811fce5-kube-api-access-wwqvz\") on node \"crc\" DevicePath \"\"" Dec 02 18:58:55 crc kubenswrapper[4792]: I1202 18:58:55.785547 4792 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/50f2a0fa-fcf2-4f6c-be51-b78ae811fce5-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 18:58:55 crc kubenswrapper[4792]: I1202 18:58:55.785556 4792 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/50f2a0fa-fcf2-4f6c-be51-b78ae811fce5-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 02 18:58:55 crc kubenswrapper[4792]: I1202 18:58:55.785565 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50f2a0fa-fcf2-4f6c-be51-b78ae811fce5-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 18:58:55 crc kubenswrapper[4792]: I1202 18:58:55.794426 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50f2a0fa-fcf2-4f6c-be51-b78ae811fce5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "50f2a0fa-fcf2-4f6c-be51-b78ae811fce5" (UID: "50f2a0fa-fcf2-4f6c-be51-b78ae811fce5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:58:55 crc kubenswrapper[4792]: I1202 18:58:55.879955 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-dbdbd8fdb-qfn5f" Dec 02 18:58:55 crc kubenswrapper[4792]: I1202 18:58:55.887101 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50f2a0fa-fcf2-4f6c-be51-b78ae811fce5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.020134 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-4d5bj" Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.089674 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/37e3266a-8a6c-4472-9bcd-459157fba9c8-dns-swift-storage-0\") pod \"37e3266a-8a6c-4472-9bcd-459157fba9c8\" (UID: \"37e3266a-8a6c-4472-9bcd-459157fba9c8\") " Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.089740 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/37e3266a-8a6c-4472-9bcd-459157fba9c8-ovsdbserver-nb\") pod \"37e3266a-8a6c-4472-9bcd-459157fba9c8\" (UID: \"37e3266a-8a6c-4472-9bcd-459157fba9c8\") " Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.089783 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s7scw\" (UniqueName: \"kubernetes.io/projected/37e3266a-8a6c-4472-9bcd-459157fba9c8-kube-api-access-s7scw\") pod \"37e3266a-8a6c-4472-9bcd-459157fba9c8\" (UID: \"37e3266a-8a6c-4472-9bcd-459157fba9c8\") " Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.089804 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37e3266a-8a6c-4472-9bcd-459157fba9c8-config\") pod \"37e3266a-8a6c-4472-9bcd-459157fba9c8\" (UID: \"37e3266a-8a6c-4472-9bcd-459157fba9c8\") " Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.089904 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/37e3266a-8a6c-4472-9bcd-459157fba9c8-dns-svc\") pod \"37e3266a-8a6c-4472-9bcd-459157fba9c8\" (UID: \"37e3266a-8a6c-4472-9bcd-459157fba9c8\") " Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.089931 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/37e3266a-8a6c-4472-9bcd-459157fba9c8-ovsdbserver-sb\") pod \"37e3266a-8a6c-4472-9bcd-459157fba9c8\" (UID: \"37e3266a-8a6c-4472-9bcd-459157fba9c8\") " Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.133185 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37e3266a-8a6c-4472-9bcd-459157fba9c8-kube-api-access-s7scw" (OuterVolumeSpecName: "kube-api-access-s7scw") pod "37e3266a-8a6c-4472-9bcd-459157fba9c8" (UID: "37e3266a-8a6c-4472-9bcd-459157fba9c8"). InnerVolumeSpecName "kube-api-access-s7scw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.145475 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-4d5bj" event={"ID":"37e3266a-8a6c-4472-9bcd-459157fba9c8","Type":"ContainerDied","Data":"09ab05cc8ca897b1a1d13e3db1f5407a89ca3a167c53f57686f6eec682214ea0"} Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.145621 4792 scope.go:117] "RemoveContainer" containerID="1340723c8cb786b6643bcc1d4da66b7941aa1f104c91ba38db06f74153d8b11f" Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.145489 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-4d5bj" Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.150168 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-t5xgj" event={"ID":"50f2a0fa-fcf2-4f6c-be51-b78ae811fce5","Type":"ContainerDied","Data":"a07d2672e64572a33a45e5d32e8e54e381e3ba86d071dab5c9b0a9eabf92fa1b"} Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.150204 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-t5xgj" Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.150210 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a07d2672e64572a33a45e5d32e8e54e381e3ba86d071dab5c9b0a9eabf92fa1b" Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.169795 4792 scope.go:117] "RemoveContainer" containerID="85a445d1592a140b8836b86170fd79ad5c3b274ea82493aae16c14f84c6531bc" Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.191960 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s7scw\" (UniqueName: \"kubernetes.io/projected/37e3266a-8a6c-4472-9bcd-459157fba9c8-kube-api-access-s7scw\") on node \"crc\" DevicePath \"\"" Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.404081 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/37e3266a-8a6c-4472-9bcd-459157fba9c8-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "37e3266a-8a6c-4472-9bcd-459157fba9c8" (UID: "37e3266a-8a6c-4472-9bcd-459157fba9c8"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.412138 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/37e3266a-8a6c-4472-9bcd-459157fba9c8-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "37e3266a-8a6c-4472-9bcd-459157fba9c8" (UID: "37e3266a-8a6c-4472-9bcd-459157fba9c8"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.413046 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/37e3266a-8a6c-4472-9bcd-459157fba9c8-config" (OuterVolumeSpecName: "config") pod "37e3266a-8a6c-4472-9bcd-459157fba9c8" (UID: "37e3266a-8a6c-4472-9bcd-459157fba9c8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.421108 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/37e3266a-8a6c-4472-9bcd-459157fba9c8-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "37e3266a-8a6c-4472-9bcd-459157fba9c8" (UID: "37e3266a-8a6c-4472-9bcd-459157fba9c8"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.446307 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/37e3266a-8a6c-4472-9bcd-459157fba9c8-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "37e3266a-8a6c-4472-9bcd-459157fba9c8" (UID: "37e3266a-8a6c-4472-9bcd-459157fba9c8"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.501023 4792 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/37e3266a-8a6c-4472-9bcd-459157fba9c8-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.501051 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/37e3266a-8a6c-4472-9bcd-459157fba9c8-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.501064 4792 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/37e3266a-8a6c-4472-9bcd-459157fba9c8-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.501076 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/37e3266a-8a6c-4472-9bcd-459157fba9c8-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.501084 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37e3266a-8a6c-4472-9bcd-459157fba9c8-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.522041 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6f7549b9fd-4lplx" Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.651402 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-4d5bj"] Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.652135 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6f7549b9fd-4lplx" Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.661641 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-4d5bj"] Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.873570 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 02 18:58:56 crc kubenswrapper[4792]: E1202 18:58:56.873908 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37e3266a-8a6c-4472-9bcd-459157fba9c8" containerName="init" Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.873920 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="37e3266a-8a6c-4472-9bcd-459157fba9c8" containerName="init" Dec 02 18:58:56 crc kubenswrapper[4792]: E1202 18:58:56.873935 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50f2a0fa-fcf2-4f6c-be51-b78ae811fce5" containerName="cinder-db-sync" Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.873941 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="50f2a0fa-fcf2-4f6c-be51-b78ae811fce5" containerName="cinder-db-sync" Dec 02 18:58:56 crc kubenswrapper[4792]: E1202 18:58:56.873962 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37e3266a-8a6c-4472-9bcd-459157fba9c8" containerName="dnsmasq-dns" Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.873968 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="37e3266a-8a6c-4472-9bcd-459157fba9c8" containerName="dnsmasq-dns" Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.874153 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="50f2a0fa-fcf2-4f6c-be51-b78ae811fce5" 
containerName="cinder-db-sync" Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.874176 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="37e3266a-8a6c-4472-9bcd-459157fba9c8" containerName="dnsmasq-dns" Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.875144 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.880334 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.880559 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.880681 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.881076 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-b8gkz" Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.924577 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.927815 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/77344f11-ce8b-4b69-b7c8-89492c0ae045-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"77344f11-ce8b-4b69-b7c8-89492c0ae045\") " pod="openstack/cinder-scheduler-0" Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.927874 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wdp5w\" (UniqueName: \"kubernetes.io/projected/77344f11-ce8b-4b69-b7c8-89492c0ae045-kube-api-access-wdp5w\") pod \"cinder-scheduler-0\" (UID: \"77344f11-ce8b-4b69-b7c8-89492c0ae045\") " pod="openstack/cinder-scheduler-0" Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.927900 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77344f11-ce8b-4b69-b7c8-89492c0ae045-config-data\") pod \"cinder-scheduler-0\" (UID: \"77344f11-ce8b-4b69-b7c8-89492c0ae045\") " pod="openstack/cinder-scheduler-0" Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.927915 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/77344f11-ce8b-4b69-b7c8-89492c0ae045-scripts\") pod \"cinder-scheduler-0\" (UID: \"77344f11-ce8b-4b69-b7c8-89492c0ae045\") " pod="openstack/cinder-scheduler-0" Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.927966 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77344f11-ce8b-4b69-b7c8-89492c0ae045-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"77344f11-ce8b-4b69-b7c8-89492c0ae045\") " pod="openstack/cinder-scheduler-0" Dec 02 18:58:56 crc kubenswrapper[4792]: I1202 18:58:56.928017 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/77344f11-ce8b-4b69-b7c8-89492c0ae045-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"77344f11-ce8b-4b69-b7c8-89492c0ae045\") " pod="openstack/cinder-scheduler-0" 
Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.029620 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/77344f11-ce8b-4b69-b7c8-89492c0ae045-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"77344f11-ce8b-4b69-b7c8-89492c0ae045\") " pod="openstack/cinder-scheduler-0" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.029691 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wdp5w\" (UniqueName: \"kubernetes.io/projected/77344f11-ce8b-4b69-b7c8-89492c0ae045-kube-api-access-wdp5w\") pod \"cinder-scheduler-0\" (UID: \"77344f11-ce8b-4b69-b7c8-89492c0ae045\") " pod="openstack/cinder-scheduler-0" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.029718 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77344f11-ce8b-4b69-b7c8-89492c0ae045-config-data\") pod \"cinder-scheduler-0\" (UID: \"77344f11-ce8b-4b69-b7c8-89492c0ae045\") " pod="openstack/cinder-scheduler-0" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.029733 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/77344f11-ce8b-4b69-b7c8-89492c0ae045-scripts\") pod \"cinder-scheduler-0\" (UID: \"77344f11-ce8b-4b69-b7c8-89492c0ae045\") " pod="openstack/cinder-scheduler-0" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.029793 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77344f11-ce8b-4b69-b7c8-89492c0ae045-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"77344f11-ce8b-4b69-b7c8-89492c0ae045\") " pod="openstack/cinder-scheduler-0" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.029843 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/77344f11-ce8b-4b69-b7c8-89492c0ae045-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"77344f11-ce8b-4b69-b7c8-89492c0ae045\") " pod="openstack/cinder-scheduler-0" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.038677 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/77344f11-ce8b-4b69-b7c8-89492c0ae045-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"77344f11-ce8b-4b69-b7c8-89492c0ae045\") " pod="openstack/cinder-scheduler-0" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.039542 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-g5f72"] Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.039967 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77344f11-ce8b-4b69-b7c8-89492c0ae045-config-data\") pod \"cinder-scheduler-0\" (UID: \"77344f11-ce8b-4b69-b7c8-89492c0ae045\") " pod="openstack/cinder-scheduler-0" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.041115 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-g5f72" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.061108 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/77344f11-ce8b-4b69-b7c8-89492c0ae045-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"77344f11-ce8b-4b69-b7c8-89492c0ae045\") " pod="openstack/cinder-scheduler-0" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.062141 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77344f11-ce8b-4b69-b7c8-89492c0ae045-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"77344f11-ce8b-4b69-b7c8-89492c0ae045\") " pod="openstack/cinder-scheduler-0" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.062540 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/77344f11-ce8b-4b69-b7c8-89492c0ae045-scripts\") pod \"cinder-scheduler-0\" (UID: \"77344f11-ce8b-4b69-b7c8-89492c0ae045\") " pod="openstack/cinder-scheduler-0" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.065791 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-g5f72"] Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.136616 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d440050a-59d7-4ea9-95a3-ca0b3beff640-dns-svc\") pod \"dnsmasq-dns-5c9776ccc5-g5f72\" (UID: \"d440050a-59d7-4ea9-95a3-ca0b3beff640\") " pod="openstack/dnsmasq-dns-5c9776ccc5-g5f72" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.136664 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d440050a-59d7-4ea9-95a3-ca0b3beff640-config\") pod \"dnsmasq-dns-5c9776ccc5-g5f72\" (UID: \"d440050a-59d7-4ea9-95a3-ca0b3beff640\") " pod="openstack/dnsmasq-dns-5c9776ccc5-g5f72" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.136709 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d440050a-59d7-4ea9-95a3-ca0b3beff640-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9776ccc5-g5f72\" (UID: \"d440050a-59d7-4ea9-95a3-ca0b3beff640\") " pod="openstack/dnsmasq-dns-5c9776ccc5-g5f72" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.136768 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d440050a-59d7-4ea9-95a3-ca0b3beff640-dns-swift-storage-0\") pod \"dnsmasq-dns-5c9776ccc5-g5f72\" (UID: \"d440050a-59d7-4ea9-95a3-ca0b3beff640\") " pod="openstack/dnsmasq-dns-5c9776ccc5-g5f72" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.136811 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d440050a-59d7-4ea9-95a3-ca0b3beff640-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9776ccc5-g5f72\" (UID: \"d440050a-59d7-4ea9-95a3-ca0b3beff640\") " pod="openstack/dnsmasq-dns-5c9776ccc5-g5f72" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.136866 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gs7l2\" (UniqueName: 
\"kubernetes.io/projected/d440050a-59d7-4ea9-95a3-ca0b3beff640-kube-api-access-gs7l2\") pod \"dnsmasq-dns-5c9776ccc5-g5f72\" (UID: \"d440050a-59d7-4ea9-95a3-ca0b3beff640\") " pod="openstack/dnsmasq-dns-5c9776ccc5-g5f72" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.139848 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wdp5w\" (UniqueName: \"kubernetes.io/projected/77344f11-ce8b-4b69-b7c8-89492c0ae045-kube-api-access-wdp5w\") pod \"cinder-scheduler-0\" (UID: \"77344f11-ce8b-4b69-b7c8-89492c0ae045\") " pod="openstack/cinder-scheduler-0" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.203421 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6d85ccb45-9bkkd" event={"ID":"bbaa6700-f41c-49a4-8593-d0d6ba1a6376","Type":"ContainerStarted","Data":"4ef95d9ac78984a9bc0875490865648e47a0a9a8004f3358c958160d0fc279dc"} Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.203465 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6d85ccb45-9bkkd" event={"ID":"bbaa6700-f41c-49a4-8593-d0d6ba1a6376","Type":"ContainerStarted","Data":"492422e90bb683f0c10aa053375e59d259f3441554d26c4e9fb44811fbb2b92b"} Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.211011 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.216730 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-wh2hr" event={"ID":"3df2077b-8a01-47ae-ad22-abfc02071c24","Type":"ContainerStarted","Data":"43670588f68762a39538e2ac8c4445d578cff5ff796b43ec83ec7fcb3c0445c2"} Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.225410 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-bdff8d974-fdcc5" event={"ID":"6e40d936-c5d2-4491-b5c5-9794c4fb73b1","Type":"ContainerStarted","Data":"d72c0fc87d1395f033eb9f8abb92aca8e1638c1cd05c6a94411f3d2f22dbd9e8"} Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.225468 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-bdff8d974-fdcc5" event={"ID":"6e40d936-c5d2-4491-b5c5-9794c4fb73b1","Type":"ContainerStarted","Data":"bb7469e5ac94d2d52570e76d6edf2ccd10ba2a234b15950ff653d0804cc05787"} Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.240820 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d440050a-59d7-4ea9-95a3-ca0b3beff640-config\") pod \"dnsmasq-dns-5c9776ccc5-g5f72\" (UID: \"d440050a-59d7-4ea9-95a3-ca0b3beff640\") " pod="openstack/dnsmasq-dns-5c9776ccc5-g5f72" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.240882 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d440050a-59d7-4ea9-95a3-ca0b3beff640-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9776ccc5-g5f72\" (UID: \"d440050a-59d7-4ea9-95a3-ca0b3beff640\") " pod="openstack/dnsmasq-dns-5c9776ccc5-g5f72" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.240946 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d440050a-59d7-4ea9-95a3-ca0b3beff640-dns-swift-storage-0\") pod \"dnsmasq-dns-5c9776ccc5-g5f72\" (UID: \"d440050a-59d7-4ea9-95a3-ca0b3beff640\") " pod="openstack/dnsmasq-dns-5c9776ccc5-g5f72" Dec 02 18:58:57 crc 
kubenswrapper[4792]: I1202 18:58:57.240992 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d440050a-59d7-4ea9-95a3-ca0b3beff640-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9776ccc5-g5f72\" (UID: \"d440050a-59d7-4ea9-95a3-ca0b3beff640\") " pod="openstack/dnsmasq-dns-5c9776ccc5-g5f72" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.241096 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gs7l2\" (UniqueName: \"kubernetes.io/projected/d440050a-59d7-4ea9-95a3-ca0b3beff640-kube-api-access-gs7l2\") pod \"dnsmasq-dns-5c9776ccc5-g5f72\" (UID: \"d440050a-59d7-4ea9-95a3-ca0b3beff640\") " pod="openstack/dnsmasq-dns-5c9776ccc5-g5f72" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.241146 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d440050a-59d7-4ea9-95a3-ca0b3beff640-dns-svc\") pod \"dnsmasq-dns-5c9776ccc5-g5f72\" (UID: \"d440050a-59d7-4ea9-95a3-ca0b3beff640\") " pod="openstack/dnsmasq-dns-5c9776ccc5-g5f72" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.241608 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d440050a-59d7-4ea9-95a3-ca0b3beff640-config\") pod \"dnsmasq-dns-5c9776ccc5-g5f72\" (UID: \"d440050a-59d7-4ea9-95a3-ca0b3beff640\") " pod="openstack/dnsmasq-dns-5c9776ccc5-g5f72" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.241797 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d440050a-59d7-4ea9-95a3-ca0b3beff640-dns-svc\") pod \"dnsmasq-dns-5c9776ccc5-g5f72\" (UID: \"d440050a-59d7-4ea9-95a3-ca0b3beff640\") " pod="openstack/dnsmasq-dns-5c9776ccc5-g5f72" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.243102 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d440050a-59d7-4ea9-95a3-ca0b3beff640-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9776ccc5-g5f72\" (UID: \"d440050a-59d7-4ea9-95a3-ca0b3beff640\") " pod="openstack/dnsmasq-dns-5c9776ccc5-g5f72" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.243739 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d440050a-59d7-4ea9-95a3-ca0b3beff640-dns-swift-storage-0\") pod \"dnsmasq-dns-5c9776ccc5-g5f72\" (UID: \"d440050a-59d7-4ea9-95a3-ca0b3beff640\") " pod="openstack/dnsmasq-dns-5c9776ccc5-g5f72" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.244237 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d440050a-59d7-4ea9-95a3-ca0b3beff640-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9776ccc5-g5f72\" (UID: \"d440050a-59d7-4ea9-95a3-ca0b3beff640\") " pod="openstack/dnsmasq-dns-5c9776ccc5-g5f72" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.244983 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-6d85ccb45-9bkkd" podStartSLOduration=3.825176948 podStartE2EDuration="14.240853237s" podCreationTimestamp="2025-12-02 18:58:43 +0000 UTC" firstStartedPulling="2025-12-02 18:58:45.122253821 +0000 UTC m=+1355.895146149" lastFinishedPulling="2025-12-02 18:58:55.53793011 +0000 UTC m=+1366.310822438" observedRunningTime="2025-12-02 18:58:57.239737529 +0000 
UTC m=+1368.012629857" watchObservedRunningTime="2025-12-02 18:58:57.240853237 +0000 UTC m=+1368.013745565" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.249779 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bdde8f20-2325-4180-85b9-72a2b2fefe9f","Type":"ContainerStarted","Data":"1436ccceea2824fcc6c0baeffaa97cab146be36645a845fd10f8ff3e11bcf7dd"} Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.249937 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="bdde8f20-2325-4180-85b9-72a2b2fefe9f" containerName="ceilometer-central-agent" containerID="cri-o://fccd05acdfae0190d57adb67e59ec0a292fdbda6b3beca1954a9d7ea96a1bdf9" gracePeriod=30 Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.250204 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.250248 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="bdde8f20-2325-4180-85b9-72a2b2fefe9f" containerName="proxy-httpd" containerID="cri-o://1436ccceea2824fcc6c0baeffaa97cab146be36645a845fd10f8ff3e11bcf7dd" gracePeriod=30 Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.250287 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="bdde8f20-2325-4180-85b9-72a2b2fefe9f" containerName="sg-core" containerID="cri-o://6728c502e45749fb7df30e2cd1d3d936b66e05d1ff271c0a71f57f64de33cae8" gracePeriod=30 Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.250322 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="bdde8f20-2325-4180-85b9-72a2b2fefe9f" containerName="ceilometer-notification-agent" containerID="cri-o://23c4030bca0c0424a8ff69891fb0f6de0457c694bc6a0c79ccf55185d7c4d351" gracePeriod=30 Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.273568 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-75d777bcc8-l485p" event={"ID":"260c73f4-d8d2-4178-924a-81703068a4f6","Type":"ContainerStarted","Data":"ea429c6e0728766bb94ffdaa016e1cb41e7d2f084acb88507fedf1aec5621848"} Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.273608 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-75d777bcc8-l485p" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.273618 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-75d777bcc8-l485p" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.273626 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-75d777bcc8-l485p" event={"ID":"260c73f4-d8d2-4178-924a-81703068a4f6","Type":"ContainerStarted","Data":"a8521e2843f5aaf91919bec5bfcfbe52897d5664a19901a99cd5203b537f4668"} Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.287154 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-db-sync-wh2hr" podStartSLOduration=3.453159137 podStartE2EDuration="1m4.287140847s" podCreationTimestamp="2025-12-02 18:57:53 +0000 UTC" firstStartedPulling="2025-12-02 18:57:54.633890925 +0000 UTC m=+1305.406783253" lastFinishedPulling="2025-12-02 18:58:55.467872635 +0000 UTC m=+1366.240764963" observedRunningTime="2025-12-02 18:58:57.284114719 +0000 UTC m=+1368.057007047" watchObservedRunningTime="2025-12-02 18:58:57.287140847 
+0000 UTC m=+1368.060033165" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.288231 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gs7l2\" (UniqueName: \"kubernetes.io/projected/d440050a-59d7-4ea9-95a3-ca0b3beff640-kube-api-access-gs7l2\") pod \"dnsmasq-dns-5c9776ccc5-g5f72\" (UID: \"d440050a-59d7-4ea9-95a3-ca0b3beff640\") " pod="openstack/dnsmasq-dns-5c9776ccc5-g5f72" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.331572 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-bdff8d974-fdcc5" podStartSLOduration=4.151894754 podStartE2EDuration="14.331553618s" podCreationTimestamp="2025-12-02 18:58:43 +0000 UTC" firstStartedPulling="2025-12-02 18:58:45.355554396 +0000 UTC m=+1356.128446724" lastFinishedPulling="2025-12-02 18:58:55.53521326 +0000 UTC m=+1366.308105588" observedRunningTime="2025-12-02 18:58:57.311930949 +0000 UTC m=+1368.084823287" watchObservedRunningTime="2025-12-02 18:58:57.331553618 +0000 UTC m=+1368.104445946" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.381199 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.382896 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.386493 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.387227 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.389002 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-75d777bcc8-l485p" podStartSLOduration=10.388988036 podStartE2EDuration="10.388988036s" podCreationTimestamp="2025-12-02 18:58:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:58:57.385127596 +0000 UTC m=+1368.158019914" watchObservedRunningTime="2025-12-02 18:58:57.388988036 +0000 UTC m=+1368.161880364" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.441973 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.445114698 podStartE2EDuration="1m4.441952868s" podCreationTimestamp="2025-12-02 18:57:53 +0000 UTC" firstStartedPulling="2025-12-02 18:57:54.696424705 +0000 UTC m=+1305.469317033" lastFinishedPulling="2025-12-02 18:58:55.693262865 +0000 UTC m=+1366.466155203" observedRunningTime="2025-12-02 18:58:57.411265863 +0000 UTC m=+1368.184158191" watchObservedRunningTime="2025-12-02 18:58:57.441952868 +0000 UTC m=+1368.214845196" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.449359 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2095942a-5571-47db-b97f-1b3611ad17ae-config-data\") pod \"cinder-api-0\" (UID: \"2095942a-5571-47db-b97f-1b3611ad17ae\") " pod="openstack/cinder-api-0" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.449395 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lqftc\" (UniqueName: \"kubernetes.io/projected/2095942a-5571-47db-b97f-1b3611ad17ae-kube-api-access-lqftc\") pod \"cinder-api-0\" 
(UID: \"2095942a-5571-47db-b97f-1b3611ad17ae\") " pod="openstack/cinder-api-0" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.449432 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2095942a-5571-47db-b97f-1b3611ad17ae-config-data-custom\") pod \"cinder-api-0\" (UID: \"2095942a-5571-47db-b97f-1b3611ad17ae\") " pod="openstack/cinder-api-0" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.449476 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2095942a-5571-47db-b97f-1b3611ad17ae-etc-machine-id\") pod \"cinder-api-0\" (UID: \"2095942a-5571-47db-b97f-1b3611ad17ae\") " pod="openstack/cinder-api-0" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.449499 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2095942a-5571-47db-b97f-1b3611ad17ae-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"2095942a-5571-47db-b97f-1b3611ad17ae\") " pod="openstack/cinder-api-0" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.449532 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2095942a-5571-47db-b97f-1b3611ad17ae-scripts\") pod \"cinder-api-0\" (UID: \"2095942a-5571-47db-b97f-1b3611ad17ae\") " pod="openstack/cinder-api-0" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.449595 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2095942a-5571-47db-b97f-1b3611ad17ae-logs\") pod \"cinder-api-0\" (UID: \"2095942a-5571-47db-b97f-1b3611ad17ae\") " pod="openstack/cinder-api-0" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.513966 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-g5f72" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.551244 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2095942a-5571-47db-b97f-1b3611ad17ae-etc-machine-id\") pod \"cinder-api-0\" (UID: \"2095942a-5571-47db-b97f-1b3611ad17ae\") " pod="openstack/cinder-api-0" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.551289 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2095942a-5571-47db-b97f-1b3611ad17ae-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"2095942a-5571-47db-b97f-1b3611ad17ae\") " pod="openstack/cinder-api-0" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.551315 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2095942a-5571-47db-b97f-1b3611ad17ae-scripts\") pod \"cinder-api-0\" (UID: \"2095942a-5571-47db-b97f-1b3611ad17ae\") " pod="openstack/cinder-api-0" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.551378 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2095942a-5571-47db-b97f-1b3611ad17ae-logs\") pod \"cinder-api-0\" (UID: \"2095942a-5571-47db-b97f-1b3611ad17ae\") " pod="openstack/cinder-api-0" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.551424 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lqftc\" (UniqueName: \"kubernetes.io/projected/2095942a-5571-47db-b97f-1b3611ad17ae-kube-api-access-lqftc\") pod \"cinder-api-0\" (UID: \"2095942a-5571-47db-b97f-1b3611ad17ae\") " pod="openstack/cinder-api-0" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.551439 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2095942a-5571-47db-b97f-1b3611ad17ae-config-data\") pod \"cinder-api-0\" (UID: \"2095942a-5571-47db-b97f-1b3611ad17ae\") " pod="openstack/cinder-api-0" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.551471 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2095942a-5571-47db-b97f-1b3611ad17ae-config-data-custom\") pod \"cinder-api-0\" (UID: \"2095942a-5571-47db-b97f-1b3611ad17ae\") " pod="openstack/cinder-api-0" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.554603 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2095942a-5571-47db-b97f-1b3611ad17ae-etc-machine-id\") pod \"cinder-api-0\" (UID: \"2095942a-5571-47db-b97f-1b3611ad17ae\") " pod="openstack/cinder-api-0" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.555182 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2095942a-5571-47db-b97f-1b3611ad17ae-logs\") pod \"cinder-api-0\" (UID: \"2095942a-5571-47db-b97f-1b3611ad17ae\") " pod="openstack/cinder-api-0" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.560569 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2095942a-5571-47db-b97f-1b3611ad17ae-config-data\") pod \"cinder-api-0\" (UID: \"2095942a-5571-47db-b97f-1b3611ad17ae\") " 
pod="openstack/cinder-api-0" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.560575 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2095942a-5571-47db-b97f-1b3611ad17ae-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"2095942a-5571-47db-b97f-1b3611ad17ae\") " pod="openstack/cinder-api-0" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.560953 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2095942a-5571-47db-b97f-1b3611ad17ae-config-data-custom\") pod \"cinder-api-0\" (UID: \"2095942a-5571-47db-b97f-1b3611ad17ae\") " pod="openstack/cinder-api-0" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.575042 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="37e3266a-8a6c-4472-9bcd-459157fba9c8" path="/var/lib/kubelet/pods/37e3266a-8a6c-4472-9bcd-459157fba9c8/volumes" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.582040 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lqftc\" (UniqueName: \"kubernetes.io/projected/2095942a-5571-47db-b97f-1b3611ad17ae-kube-api-access-lqftc\") pod \"cinder-api-0\" (UID: \"2095942a-5571-47db-b97f-1b3611ad17ae\") " pod="openstack/cinder-api-0" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.583623 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2095942a-5571-47db-b97f-1b3611ad17ae-scripts\") pod \"cinder-api-0\" (UID: \"2095942a-5571-47db-b97f-1b3611ad17ae\") " pod="openstack/cinder-api-0" Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.750009 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0"
Dec 02 18:58:57 crc kubenswrapper[4792]: I1202 18:58:57.983149 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Dec 02 18:58:58 crc kubenswrapper[4792]: I1202 18:58:58.071864 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-5c7cc4d64c-s885f"
Dec 02 18:58:58 crc kubenswrapper[4792]: I1202 18:58:58.168805 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-dbdbd8fdb-qfn5f"]
Dec 02 18:58:58 crc kubenswrapper[4792]: I1202 18:58:58.169332 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-dbdbd8fdb-qfn5f" podUID="4e8170b3-9772-49c8-af59-87f59120f79e" containerName="neutron-api" containerID="cri-o://4300cdeec8594eb1591851d81d17471828b13074e67a7bdcb89053867bc4d3b4" gracePeriod=30
Dec 02 18:58:58 crc kubenswrapper[4792]: I1202 18:58:58.169616 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-dbdbd8fdb-qfn5f" podUID="4e8170b3-9772-49c8-af59-87f59120f79e" containerName="neutron-httpd" containerID="cri-o://2a1bf3cca136c798737aa7c278cec751d646cc89e57e234d02cfd751a9034823" gracePeriod=30
Dec 02 18:58:58 crc kubenswrapper[4792]: I1202 18:58:58.179451 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-g5f72"]
Dec 02 18:58:58 crc kubenswrapper[4792]: I1202 18:58:58.322710 4792 generic.go:334] "Generic (PLEG): container finished" podID="bdde8f20-2325-4180-85b9-72a2b2fefe9f" containerID="1436ccceea2824fcc6c0baeffaa97cab146be36645a845fd10f8ff3e11bcf7dd" exitCode=0
Dec 02 18:58:58 crc kubenswrapper[4792]: I1202 18:58:58.322737 4792 generic.go:334] "Generic (PLEG): container finished" podID="bdde8f20-2325-4180-85b9-72a2b2fefe9f" containerID="6728c502e45749fb7df30e2cd1d3d936b66e05d1ff271c0a71f57f64de33cae8" exitCode=2
Dec 02 18:58:58 crc kubenswrapper[4792]: I1202 18:58:58.322744 4792 generic.go:334] "Generic (PLEG): container finished" podID="bdde8f20-2325-4180-85b9-72a2b2fefe9f" containerID="23c4030bca0c0424a8ff69891fb0f6de0457c694bc6a0c79ccf55185d7c4d351" exitCode=0
Dec 02 18:58:58 crc kubenswrapper[4792]: I1202 18:58:58.322752 4792 generic.go:334] "Generic (PLEG): container finished" podID="bdde8f20-2325-4180-85b9-72a2b2fefe9f" containerID="fccd05acdfae0190d57adb67e59ec0a292fdbda6b3beca1954a9d7ea96a1bdf9" exitCode=0
Dec 02 18:58:58 crc kubenswrapper[4792]: I1202 18:58:58.322792 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bdde8f20-2325-4180-85b9-72a2b2fefe9f","Type":"ContainerDied","Data":"1436ccceea2824fcc6c0baeffaa97cab146be36645a845fd10f8ff3e11bcf7dd"}
Dec 02 18:58:58 crc kubenswrapper[4792]: I1202 18:58:58.322816 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bdde8f20-2325-4180-85b9-72a2b2fefe9f","Type":"ContainerDied","Data":"6728c502e45749fb7df30e2cd1d3d936b66e05d1ff271c0a71f57f64de33cae8"}
Dec 02 18:58:58 crc kubenswrapper[4792]: I1202 18:58:58.322825 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bdde8f20-2325-4180-85b9-72a2b2fefe9f","Type":"ContainerDied","Data":"23c4030bca0c0424a8ff69891fb0f6de0457c694bc6a0c79ccf55185d7c4d351"}
Dec 02 18:58:58 crc kubenswrapper[4792]: I1202 18:58:58.322833 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bdde8f20-2325-4180-85b9-72a2b2fefe9f","Type":"ContainerDied","Data":"fccd05acdfae0190d57adb67e59ec0a292fdbda6b3beca1954a9d7ea96a1bdf9"}
Dec 02 18:58:58 crc kubenswrapper[4792]: I1202 18:58:58.340640 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-g5f72" event={"ID":"d440050a-59d7-4ea9-95a3-ca0b3beff640","Type":"ContainerStarted","Data":"d50e667cd1fb476ff0006457797fdc2304d65cd274f272d091cf86bc9973e8e0"}
Dec 02 18:58:58 crc kubenswrapper[4792]: I1202 18:58:58.355756 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"77344f11-ce8b-4b69-b7c8-89492c0ae045","Type":"ContainerStarted","Data":"4497588b5b53893e07250547f5a77a4562a893575def622e4faa01a5afd32538"}
Dec 02 18:58:58 crc kubenswrapper[4792]: I1202 18:58:58.669554 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Dec 02 18:58:58 crc kubenswrapper[4792]: I1202 18:58:58.670804 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 02 18:58:58 crc kubenswrapper[4792]: I1202 18:58:58.786279 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bdde8f20-2325-4180-85b9-72a2b2fefe9f-scripts\") pod \"bdde8f20-2325-4180-85b9-72a2b2fefe9f\" (UID: \"bdde8f20-2325-4180-85b9-72a2b2fefe9f\") "
Dec 02 18:58:58 crc kubenswrapper[4792]: I1202 18:58:58.786564 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bdde8f20-2325-4180-85b9-72a2b2fefe9f-log-httpd\") pod \"bdde8f20-2325-4180-85b9-72a2b2fefe9f\" (UID: \"bdde8f20-2325-4180-85b9-72a2b2fefe9f\") "
Dec 02 18:58:58 crc kubenswrapper[4792]: I1202 18:58:58.786623 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bdde8f20-2325-4180-85b9-72a2b2fefe9f-run-httpd\") pod \"bdde8f20-2325-4180-85b9-72a2b2fefe9f\" (UID: \"bdde8f20-2325-4180-85b9-72a2b2fefe9f\") "
Dec 02 18:58:58 crc kubenswrapper[4792]: I1202 18:58:58.786710 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bdde8f20-2325-4180-85b9-72a2b2fefe9f-combined-ca-bundle\") pod \"bdde8f20-2325-4180-85b9-72a2b2fefe9f\" (UID: \"bdde8f20-2325-4180-85b9-72a2b2fefe9f\") "
Dec 02 18:58:58 crc kubenswrapper[4792]: I1202 18:58:58.786824 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bdde8f20-2325-4180-85b9-72a2b2fefe9f-sg-core-conf-yaml\") pod \"bdde8f20-2325-4180-85b9-72a2b2fefe9f\" (UID: \"bdde8f20-2325-4180-85b9-72a2b2fefe9f\") "
Dec 02 18:58:58 crc kubenswrapper[4792]: I1202 18:58:58.786850 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bdde8f20-2325-4180-85b9-72a2b2fefe9f-config-data\") pod \"bdde8f20-2325-4180-85b9-72a2b2fefe9f\" (UID: \"bdde8f20-2325-4180-85b9-72a2b2fefe9f\") "
Dec 02 18:58:58 crc kubenswrapper[4792]: I1202 18:58:58.786883 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-czpkc\" (UniqueName: \"kubernetes.io/projected/bdde8f20-2325-4180-85b9-72a2b2fefe9f-kube-api-access-czpkc\") pod \"bdde8f20-2325-4180-85b9-72a2b2fefe9f\" (UID: \"bdde8f20-2325-4180-85b9-72a2b2fefe9f\") "
Dec 02 18:58:58 crc kubenswrapper[4792]: I1202 18:58:58.787915 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bdde8f20-2325-4180-85b9-72a2b2fefe9f-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "bdde8f20-2325-4180-85b9-72a2b2fefe9f" (UID: "bdde8f20-2325-4180-85b9-72a2b2fefe9f"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 18:58:58 crc kubenswrapper[4792]: I1202 18:58:58.788054 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bdde8f20-2325-4180-85b9-72a2b2fefe9f-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "bdde8f20-2325-4180-85b9-72a2b2fefe9f" (UID: "bdde8f20-2325-4180-85b9-72a2b2fefe9f"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 18:58:58 crc kubenswrapper[4792]: I1202 18:58:58.801177 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bdde8f20-2325-4180-85b9-72a2b2fefe9f-scripts" (OuterVolumeSpecName: "scripts") pod "bdde8f20-2325-4180-85b9-72a2b2fefe9f" (UID: "bdde8f20-2325-4180-85b9-72a2b2fefe9f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 18:58:58 crc kubenswrapper[4792]: I1202 18:58:58.802764 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bdde8f20-2325-4180-85b9-72a2b2fefe9f-kube-api-access-czpkc" (OuterVolumeSpecName: "kube-api-access-czpkc") pod "bdde8f20-2325-4180-85b9-72a2b2fefe9f" (UID: "bdde8f20-2325-4180-85b9-72a2b2fefe9f"). InnerVolumeSpecName "kube-api-access-czpkc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 18:58:58 crc kubenswrapper[4792]: I1202 18:58:58.865955 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bdde8f20-2325-4180-85b9-72a2b2fefe9f-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "bdde8f20-2325-4180-85b9-72a2b2fefe9f" (UID: "bdde8f20-2325-4180-85b9-72a2b2fefe9f"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 18:58:58 crc kubenswrapper[4792]: I1202 18:58:58.891488 4792 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bdde8f20-2325-4180-85b9-72a2b2fefe9f-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Dec 02 18:58:58 crc kubenswrapper[4792]: I1202 18:58:58.891697 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-czpkc\" (UniqueName: \"kubernetes.io/projected/bdde8f20-2325-4180-85b9-72a2b2fefe9f-kube-api-access-czpkc\") on node \"crc\" DevicePath \"\""
Dec 02 18:58:58 crc kubenswrapper[4792]: I1202 18:58:58.891760 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bdde8f20-2325-4180-85b9-72a2b2fefe9f-scripts\") on node \"crc\" DevicePath \"\""
Dec 02 18:58:58 crc kubenswrapper[4792]: I1202 18:58:58.891815 4792 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bdde8f20-2325-4180-85b9-72a2b2fefe9f-log-httpd\") on node \"crc\" DevicePath \"\""
Dec 02 18:58:58 crc kubenswrapper[4792]: I1202 18:58:58.891881 4792 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bdde8f20-2325-4180-85b9-72a2b2fefe9f-run-httpd\") on node \"crc\" DevicePath \"\""
Dec 02 18:58:58 crc kubenswrapper[4792]: I1202 18:58:58.954690 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bdde8f20-2325-4180-85b9-72a2b2fefe9f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bdde8f20-2325-4180-85b9-72a2b2fefe9f" (UID: "bdde8f20-2325-4180-85b9-72a2b2fefe9f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 18:58:58 crc kubenswrapper[4792]: I1202 18:58:58.998381 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bdde8f20-2325-4180-85b9-72a2b2fefe9f-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.041048 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bdde8f20-2325-4180-85b9-72a2b2fefe9f-config-data" (OuterVolumeSpecName: "config-data") pod "bdde8f20-2325-4180-85b9-72a2b2fefe9f" (UID: "bdde8f20-2325-4180-85b9-72a2b2fefe9f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.100123 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bdde8f20-2325-4180-85b9-72a2b2fefe9f-config-data\") on node \"crc\" DevicePath \"\""
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.385458 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bdde8f20-2325-4180-85b9-72a2b2fefe9f","Type":"ContainerDied","Data":"914fcd4638c39e6fc6fa59a75a0bd773febe5625ba4e542b1afa29ec76574e69"}
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.385510 4792 scope.go:117] "RemoveContainer" containerID="1436ccceea2824fcc6c0baeffaa97cab146be36645a845fd10f8ff3e11bcf7dd"
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.385633 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.394483 4792 generic.go:334] "Generic (PLEG): container finished" podID="4e8170b3-9772-49c8-af59-87f59120f79e" containerID="2a1bf3cca136c798737aa7c278cec751d646cc89e57e234d02cfd751a9034823" exitCode=0
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.394557 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dbdbd8fdb-qfn5f" event={"ID":"4e8170b3-9772-49c8-af59-87f59120f79e","Type":"ContainerDied","Data":"2a1bf3cca136c798737aa7c278cec751d646cc89e57e234d02cfd751a9034823"}
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.396424 4792 generic.go:334] "Generic (PLEG): container finished" podID="d440050a-59d7-4ea9-95a3-ca0b3beff640" containerID="14ed918c594319e33686fe22c1cfca2b59d830769068b32319c2f10325c12a23" exitCode=0
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.396468 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-g5f72" event={"ID":"d440050a-59d7-4ea9-95a3-ca0b3beff640","Type":"ContainerDied","Data":"14ed918c594319e33686fe22c1cfca2b59d830769068b32319c2f10325c12a23"}
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.416261 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"2095942a-5571-47db-b97f-1b3611ad17ae","Type":"ContainerStarted","Data":"fa2477cae2ad34a9d00e7839ce18cdfb5153a76a8b42715767c5a4a426b912b3"}
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.467935 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.487587 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.496794 4792 scope.go:117] "RemoveContainer" containerID="6728c502e45749fb7df30e2cd1d3d936b66e05d1ff271c0a71f57f64de33cae8"
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.497678 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Dec 02 18:58:59 crc kubenswrapper[4792]: E1202 18:58:59.498074 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bdde8f20-2325-4180-85b9-72a2b2fefe9f" containerName="ceilometer-central-agent"
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.498092 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="bdde8f20-2325-4180-85b9-72a2b2fefe9f" containerName="ceilometer-central-agent"
Dec 02 18:58:59 crc kubenswrapper[4792]: E1202 18:58:59.498133 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bdde8f20-2325-4180-85b9-72a2b2fefe9f" containerName="proxy-httpd"
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.498139 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="bdde8f20-2325-4180-85b9-72a2b2fefe9f" containerName="proxy-httpd"
Dec 02 18:58:59 crc kubenswrapper[4792]: E1202 18:58:59.498151 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bdde8f20-2325-4180-85b9-72a2b2fefe9f" containerName="ceilometer-notification-agent"
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.498157 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="bdde8f20-2325-4180-85b9-72a2b2fefe9f" containerName="ceilometer-notification-agent"
Dec 02 18:58:59 crc kubenswrapper[4792]: E1202 18:58:59.498166 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bdde8f20-2325-4180-85b9-72a2b2fefe9f" containerName="sg-core"
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.498174 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="bdde8f20-2325-4180-85b9-72a2b2fefe9f" containerName="sg-core"
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.498374 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="bdde8f20-2325-4180-85b9-72a2b2fefe9f" containerName="sg-core"
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.498397 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="bdde8f20-2325-4180-85b9-72a2b2fefe9f" containerName="ceilometer-central-agent"
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.498411 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="bdde8f20-2325-4180-85b9-72a2b2fefe9f" containerName="ceilometer-notification-agent"
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.498423 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="bdde8f20-2325-4180-85b9-72a2b2fefe9f" containerName="proxy-httpd"
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.501687 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.505593 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.505600 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.516970 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.551726 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bdde8f20-2325-4180-85b9-72a2b2fefe9f" path="/var/lib/kubelet/pods/bdde8f20-2325-4180-85b9-72a2b2fefe9f/volumes"
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.598940 4792 scope.go:117] "RemoveContainer" containerID="23c4030bca0c0424a8ff69891fb0f6de0457c694bc6a0c79ccf55185d7c4d351"
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.614336 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a165cea6-0a2d-4386-8db8-aaf30d21d213-run-httpd\") pod \"ceilometer-0\" (UID: \"a165cea6-0a2d-4386-8db8-aaf30d21d213\") " pod="openstack/ceilometer-0"
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.614436 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a165cea6-0a2d-4386-8db8-aaf30d21d213-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a165cea6-0a2d-4386-8db8-aaf30d21d213\") " pod="openstack/ceilometer-0"
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.614548 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a165cea6-0a2d-4386-8db8-aaf30d21d213-scripts\") pod \"ceilometer-0\" (UID: \"a165cea6-0a2d-4386-8db8-aaf30d21d213\") " pod="openstack/ceilometer-0"
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.614629 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8sbdb\" (UniqueName: \"kubernetes.io/projected/a165cea6-0a2d-4386-8db8-aaf30d21d213-kube-api-access-8sbdb\") pod \"ceilometer-0\" (UID: \"a165cea6-0a2d-4386-8db8-aaf30d21d213\") " pod="openstack/ceilometer-0"
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.614649 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a165cea6-0a2d-4386-8db8-aaf30d21d213-config-data\") pod \"ceilometer-0\" (UID: \"a165cea6-0a2d-4386-8db8-aaf30d21d213\") " pod="openstack/ceilometer-0"
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.614687 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a165cea6-0a2d-4386-8db8-aaf30d21d213-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a165cea6-0a2d-4386-8db8-aaf30d21d213\") " pod="openstack/ceilometer-0"
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.614749 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a165cea6-0a2d-4386-8db8-aaf30d21d213-log-httpd\") pod \"ceilometer-0\" (UID: \"a165cea6-0a2d-4386-8db8-aaf30d21d213\") " pod="openstack/ceilometer-0"
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.684917 4792 scope.go:117] "RemoveContainer" containerID="fccd05acdfae0190d57adb67e59ec0a292fdbda6b3beca1954a9d7ea96a1bdf9"
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.716420 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a165cea6-0a2d-4386-8db8-aaf30d21d213-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a165cea6-0a2d-4386-8db8-aaf30d21d213\") " pod="openstack/ceilometer-0"
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.716481 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a165cea6-0a2d-4386-8db8-aaf30d21d213-scripts\") pod \"ceilometer-0\" (UID: \"a165cea6-0a2d-4386-8db8-aaf30d21d213\") " pod="openstack/ceilometer-0"
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.716561 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8sbdb\" (UniqueName: \"kubernetes.io/projected/a165cea6-0a2d-4386-8db8-aaf30d21d213-kube-api-access-8sbdb\") pod \"ceilometer-0\" (UID: \"a165cea6-0a2d-4386-8db8-aaf30d21d213\") " pod="openstack/ceilometer-0"
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.716583 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a165cea6-0a2d-4386-8db8-aaf30d21d213-config-data\") pod \"ceilometer-0\" (UID: \"a165cea6-0a2d-4386-8db8-aaf30d21d213\") " pod="openstack/ceilometer-0"
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.716612 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a165cea6-0a2d-4386-8db8-aaf30d21d213-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a165cea6-0a2d-4386-8db8-aaf30d21d213\") " pod="openstack/ceilometer-0"
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.716661 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a165cea6-0a2d-4386-8db8-aaf30d21d213-log-httpd\") pod \"ceilometer-0\" (UID: \"a165cea6-0a2d-4386-8db8-aaf30d21d213\") " pod="openstack/ceilometer-0"
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.716683 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a165cea6-0a2d-4386-8db8-aaf30d21d213-run-httpd\") pod \"ceilometer-0\" (UID: \"a165cea6-0a2d-4386-8db8-aaf30d21d213\") " pod="openstack/ceilometer-0"
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.717080 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a165cea6-0a2d-4386-8db8-aaf30d21d213-run-httpd\") pod \"ceilometer-0\" (UID: \"a165cea6-0a2d-4386-8db8-aaf30d21d213\") " pod="openstack/ceilometer-0"
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.717463 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a165cea6-0a2d-4386-8db8-aaf30d21d213-log-httpd\") pod \"ceilometer-0\" (UID: \"a165cea6-0a2d-4386-8db8-aaf30d21d213\") " pod="openstack/ceilometer-0"
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.720719 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a165cea6-0a2d-4386-8db8-aaf30d21d213-scripts\") pod \"ceilometer-0\" (UID: \"a165cea6-0a2d-4386-8db8-aaf30d21d213\") " pod="openstack/ceilometer-0"
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.721108 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a165cea6-0a2d-4386-8db8-aaf30d21d213-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a165cea6-0a2d-4386-8db8-aaf30d21d213\") " pod="openstack/ceilometer-0"
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.724374 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a165cea6-0a2d-4386-8db8-aaf30d21d213-config-data\") pod \"ceilometer-0\" (UID: \"a165cea6-0a2d-4386-8db8-aaf30d21d213\") " pod="openstack/ceilometer-0"
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.740193 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a165cea6-0a2d-4386-8db8-aaf30d21d213-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a165cea6-0a2d-4386-8db8-aaf30d21d213\") " pod="openstack/ceilometer-0"
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.743175 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8sbdb\" (UniqueName: \"kubernetes.io/projected/a165cea6-0a2d-4386-8db8-aaf30d21d213-kube-api-access-8sbdb\") pod \"ceilometer-0\" (UID: \"a165cea6-0a2d-4386-8db8-aaf30d21d213\") " pod="openstack/ceilometer-0"
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.820295 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"]
Dec 02 18:58:59 crc kubenswrapper[4792]: I1202 18:58:59.990332 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 02 18:59:00 crc kubenswrapper[4792]: I1202 18:59:00.439085 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-g5f72" event={"ID":"d440050a-59d7-4ea9-95a3-ca0b3beff640","Type":"ContainerStarted","Data":"45e1fbbb485affa2365cd04248d2f3c1cb5430f0170aeea240b30792e8dc840c"}
Dec 02 18:59:00 crc kubenswrapper[4792]: I1202 18:59:00.439515 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c9776ccc5-g5f72"
Dec 02 18:59:00 crc kubenswrapper[4792]: I1202 18:59:00.442732 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"77344f11-ce8b-4b69-b7c8-89492c0ae045","Type":"ContainerStarted","Data":"ca783563b855034fd155cc28b1e580cd02169610f8b95c508cc9d26cbd6ccd83"}
Dec 02 18:59:00 crc kubenswrapper[4792]: I1202 18:59:00.445206 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"2095942a-5571-47db-b97f-1b3611ad17ae","Type":"ContainerStarted","Data":"5d958d97b6e0aa6be6ddb548fdf01b436880e0d114a56d3228a32676ea451941"}
Dec 02 18:59:00 crc kubenswrapper[4792]: I1202 18:59:00.463515 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c9776ccc5-g5f72" podStartSLOduration=4.462710122 podStartE2EDuration="4.462710122s" podCreationTimestamp="2025-12-02 18:58:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:59:00.460801042 +0000 UTC m=+1371.233693370" watchObservedRunningTime="2025-12-02 18:59:00.462710122 +0000 UTC m=+1371.235602460"
Dec 02 18:59:00 crc kubenswrapper[4792]: I1202 18:59:00.554112 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 02 18:59:00 crc kubenswrapper[4792]: W1202 18:59:00.562071 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda165cea6_0a2d_4386_8db8_aaf30d21d213.slice/crio-fcf826e1bb7cc75961f1c85f554a593e29b9344cc5fed1b73fec88a3a9c8b007 WatchSource:0}: Error finding container fcf826e1bb7cc75961f1c85f554a593e29b9344cc5fed1b73fec88a3a9c8b007: Status 404 returned error can't find the container with id fcf826e1bb7cc75961f1c85f554a593e29b9344cc5fed1b73fec88a3a9c8b007
Dec 02 18:59:00 crc kubenswrapper[4792]: I1202 18:59:00.812250 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-55f844cf75-4d5bj" podUID="37e3266a-8a6c-4472-9bcd-459157fba9c8" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.169:5353: i/o timeout"
Dec 02 18:59:01 crc kubenswrapper[4792]: I1202 18:59:01.457848 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a165cea6-0a2d-4386-8db8-aaf30d21d213","Type":"ContainerStarted","Data":"d951f879e180a1733e6caf68b83f64d3278a977cae86369951e62abb1031895c"}
Dec 02 18:59:01 crc kubenswrapper[4792]: I1202 18:59:01.458318 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a165cea6-0a2d-4386-8db8-aaf30d21d213","Type":"ContainerStarted","Data":"fcf826e1bb7cc75961f1c85f554a593e29b9344cc5fed1b73fec88a3a9c8b007"}
Dec 02 18:59:01 crc kubenswrapper[4792]: I1202 18:59:01.459713 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"77344f11-ce8b-4b69-b7c8-89492c0ae045","Type":"ContainerStarted","Data":"b9916f2a5501389ec630d168caa306ff10053da7b7757b3119501cd7f351177c"}
Dec 02 18:59:01 crc kubenswrapper[4792]: I1202 18:59:01.461847 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="2095942a-5571-47db-b97f-1b3611ad17ae" containerName="cinder-api-log" containerID="cri-o://5d958d97b6e0aa6be6ddb548fdf01b436880e0d114a56d3228a32676ea451941" gracePeriod=30
Dec 02 18:59:01 crc kubenswrapper[4792]: I1202 18:59:01.462111 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"2095942a-5571-47db-b97f-1b3611ad17ae","Type":"ContainerStarted","Data":"d266e9f24c69b50d36c30b53f7d11a56cbfa6d8c86dc184c9f90b1f844ff513b"}
Dec 02 18:59:01 crc kubenswrapper[4792]: I1202 18:59:01.462151 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0"
Dec 02 18:59:01 crc kubenswrapper[4792]: I1202 18:59:01.462186 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="2095942a-5571-47db-b97f-1b3611ad17ae" containerName="cinder-api" containerID="cri-o://d266e9f24c69b50d36c30b53f7d11a56cbfa6d8c86dc184c9f90b1f844ff513b" gracePeriod=30
Dec 02 18:59:01 crc kubenswrapper[4792]: I1202 18:59:01.483147 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.690217807 podStartE2EDuration="5.483129963s" podCreationTimestamp="2025-12-02 18:58:56 +0000 UTC" firstStartedPulling="2025-12-02 18:58:58.03825705 +0000 UTC m=+1368.811149378" lastFinishedPulling="2025-12-02 18:58:58.831169216 +0000 UTC m=+1369.604061534" observedRunningTime="2025-12-02 18:59:01.48300938 +0000 UTC m=+1372.255901728" watchObservedRunningTime="2025-12-02 18:59:01.483129963 +0000 UTC m=+1372.256022291"
Dec 02 18:59:01 crc kubenswrapper[4792]: I1202 18:59:01.503656 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.503636624 podStartE2EDuration="4.503636624s" podCreationTimestamp="2025-12-02 18:58:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:59:01.50348671 +0000 UTC m=+1372.276379028" watchObservedRunningTime="2025-12-02 18:59:01.503636624 +0000 UTC m=+1372.276528952"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.005451 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.081926 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2095942a-5571-47db-b97f-1b3611ad17ae-etc-machine-id\") pod \"2095942a-5571-47db-b97f-1b3611ad17ae\" (UID: \"2095942a-5571-47db-b97f-1b3611ad17ae\") "
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.082077 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2095942a-5571-47db-b97f-1b3611ad17ae-scripts\") pod \"2095942a-5571-47db-b97f-1b3611ad17ae\" (UID: \"2095942a-5571-47db-b97f-1b3611ad17ae\") "
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.082138 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2095942a-5571-47db-b97f-1b3611ad17ae-combined-ca-bundle\") pod \"2095942a-5571-47db-b97f-1b3611ad17ae\" (UID: \"2095942a-5571-47db-b97f-1b3611ad17ae\") "
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.082181 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2095942a-5571-47db-b97f-1b3611ad17ae-config-data-custom\") pod \"2095942a-5571-47db-b97f-1b3611ad17ae\" (UID: \"2095942a-5571-47db-b97f-1b3611ad17ae\") "
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.082208 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2095942a-5571-47db-b97f-1b3611ad17ae-config-data\") pod \"2095942a-5571-47db-b97f-1b3611ad17ae\" (UID: \"2095942a-5571-47db-b97f-1b3611ad17ae\") "
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.082642 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2095942a-5571-47db-b97f-1b3611ad17ae-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "2095942a-5571-47db-b97f-1b3611ad17ae" (UID: "2095942a-5571-47db-b97f-1b3611ad17ae"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.083292 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lqftc\" (UniqueName: \"kubernetes.io/projected/2095942a-5571-47db-b97f-1b3611ad17ae-kube-api-access-lqftc\") pod \"2095942a-5571-47db-b97f-1b3611ad17ae\" (UID: \"2095942a-5571-47db-b97f-1b3611ad17ae\") "
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.083445 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2095942a-5571-47db-b97f-1b3611ad17ae-logs\") pod \"2095942a-5571-47db-b97f-1b3611ad17ae\" (UID: \"2095942a-5571-47db-b97f-1b3611ad17ae\") "
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.084288 4792 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2095942a-5571-47db-b97f-1b3611ad17ae-etc-machine-id\") on node \"crc\" DevicePath \"\""
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.084681 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2095942a-5571-47db-b97f-1b3611ad17ae-logs" (OuterVolumeSpecName: "logs") pod "2095942a-5571-47db-b97f-1b3611ad17ae" (UID: "2095942a-5571-47db-b97f-1b3611ad17ae"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.088199 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2095942a-5571-47db-b97f-1b3611ad17ae-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "2095942a-5571-47db-b97f-1b3611ad17ae" (UID: "2095942a-5571-47db-b97f-1b3611ad17ae"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.089383 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2095942a-5571-47db-b97f-1b3611ad17ae-scripts" (OuterVolumeSpecName: "scripts") pod "2095942a-5571-47db-b97f-1b3611ad17ae" (UID: "2095942a-5571-47db-b97f-1b3611ad17ae"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.089500 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2095942a-5571-47db-b97f-1b3611ad17ae-kube-api-access-lqftc" (OuterVolumeSpecName: "kube-api-access-lqftc") pod "2095942a-5571-47db-b97f-1b3611ad17ae" (UID: "2095942a-5571-47db-b97f-1b3611ad17ae"). InnerVolumeSpecName "kube-api-access-lqftc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.133304 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2095942a-5571-47db-b97f-1b3611ad17ae-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2095942a-5571-47db-b97f-1b3611ad17ae" (UID: "2095942a-5571-47db-b97f-1b3611ad17ae"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.145620 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2095942a-5571-47db-b97f-1b3611ad17ae-config-data" (OuterVolumeSpecName: "config-data") pod "2095942a-5571-47db-b97f-1b3611ad17ae" (UID: "2095942a-5571-47db-b97f-1b3611ad17ae"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.186312 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2095942a-5571-47db-b97f-1b3611ad17ae-scripts\") on node \"crc\" DevicePath \"\""
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.186339 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2095942a-5571-47db-b97f-1b3611ad17ae-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.186350 4792 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2095942a-5571-47db-b97f-1b3611ad17ae-config-data-custom\") on node \"crc\" DevicePath \"\""
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.186357 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2095942a-5571-47db-b97f-1b3611ad17ae-config-data\") on node \"crc\" DevicePath \"\""
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.186366 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lqftc\" (UniqueName: \"kubernetes.io/projected/2095942a-5571-47db-b97f-1b3611ad17ae-kube-api-access-lqftc\") on node \"crc\" DevicePath \"\""
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.186375 4792 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2095942a-5571-47db-b97f-1b3611ad17ae-logs\") on node \"crc\" DevicePath \"\""
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.212481 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.482825 4792 generic.go:334] "Generic (PLEG): container finished" podID="3df2077b-8a01-47ae-ad22-abfc02071c24" containerID="43670588f68762a39538e2ac8c4445d578cff5ff796b43ec83ec7fcb3c0445c2" exitCode=0
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.483293 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-wh2hr" event={"ID":"3df2077b-8a01-47ae-ad22-abfc02071c24","Type":"ContainerDied","Data":"43670588f68762a39538e2ac8c4445d578cff5ff796b43ec83ec7fcb3c0445c2"}
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.489210 4792 generic.go:334] "Generic (PLEG): container finished" podID="2095942a-5571-47db-b97f-1b3611ad17ae" containerID="d266e9f24c69b50d36c30b53f7d11a56cbfa6d8c86dc184c9f90b1f844ff513b" exitCode=0
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.489252 4792 generic.go:334] "Generic (PLEG): container finished" podID="2095942a-5571-47db-b97f-1b3611ad17ae" containerID="5d958d97b6e0aa6be6ddb548fdf01b436880e0d114a56d3228a32676ea451941" exitCode=143
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.489319 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"2095942a-5571-47db-b97f-1b3611ad17ae","Type":"ContainerDied","Data":"d266e9f24c69b50d36c30b53f7d11a56cbfa6d8c86dc184c9f90b1f844ff513b"}
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.489363 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"2095942a-5571-47db-b97f-1b3611ad17ae","Type":"ContainerDied","Data":"5d958d97b6e0aa6be6ddb548fdf01b436880e0d114a56d3228a32676ea451941"}
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.489385 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"2095942a-5571-47db-b97f-1b3611ad17ae","Type":"ContainerDied","Data":"fa2477cae2ad34a9d00e7839ce18cdfb5153a76a8b42715767c5a4a426b912b3"}
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.489409 4792 scope.go:117] "RemoveContainer" containerID="d266e9f24c69b50d36c30b53f7d11a56cbfa6d8c86dc184c9f90b1f844ff513b"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.489608 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.507753 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a165cea6-0a2d-4386-8db8-aaf30d21d213","Type":"ContainerStarted","Data":"0b13d655c1ac5b114467df92a6370247532aa915621d1370baee4276003eca17"}
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.554624 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"]
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.559491 4792 scope.go:117] "RemoveContainer" containerID="5d958d97b6e0aa6be6ddb548fdf01b436880e0d114a56d3228a32676ea451941"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.566017 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"]
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.594217 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"]
Dec 02 18:59:02 crc kubenswrapper[4792]: E1202 18:59:02.594780 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2095942a-5571-47db-b97f-1b3611ad17ae" containerName="cinder-api"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.594798 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="2095942a-5571-47db-b97f-1b3611ad17ae" containerName="cinder-api"
Dec 02 18:59:02 crc kubenswrapper[4792]: E1202 18:59:02.594828 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2095942a-5571-47db-b97f-1b3611ad17ae" containerName="cinder-api-log"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.594835 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="2095942a-5571-47db-b97f-1b3611ad17ae" containerName="cinder-api-log"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.595139 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="2095942a-5571-47db-b97f-1b3611ad17ae" containerName="cinder-api"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.595160 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="2095942a-5571-47db-b97f-1b3611ad17ae" containerName="cinder-api-log"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.596571 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.602462 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.602977 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.603120 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.607949 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.664171 4792 scope.go:117] "RemoveContainer" containerID="d266e9f24c69b50d36c30b53f7d11a56cbfa6d8c86dc184c9f90b1f844ff513b"
Dec 02 18:59:02 crc kubenswrapper[4792]: E1202 18:59:02.666734 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d266e9f24c69b50d36c30b53f7d11a56cbfa6d8c86dc184c9f90b1f844ff513b\": container with ID starting with d266e9f24c69b50d36c30b53f7d11a56cbfa6d8c86dc184c9f90b1f844ff513b not found: ID does not exist" containerID="d266e9f24c69b50d36c30b53f7d11a56cbfa6d8c86dc184c9f90b1f844ff513b"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.666773 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d266e9f24c69b50d36c30b53f7d11a56cbfa6d8c86dc184c9f90b1f844ff513b"} err="failed to get container status \"d266e9f24c69b50d36c30b53f7d11a56cbfa6d8c86dc184c9f90b1f844ff513b\": rpc error: code = NotFound desc = could not find container \"d266e9f24c69b50d36c30b53f7d11a56cbfa6d8c86dc184c9f90b1f844ff513b\": container with ID starting with d266e9f24c69b50d36c30b53f7d11a56cbfa6d8c86dc184c9f90b1f844ff513b not found: ID does not exist"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.666796 4792 scope.go:117] "RemoveContainer" containerID="5d958d97b6e0aa6be6ddb548fdf01b436880e0d114a56d3228a32676ea451941"
Dec 02 18:59:02 crc kubenswrapper[4792]: E1202 18:59:02.667260 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5d958d97b6e0aa6be6ddb548fdf01b436880e0d114a56d3228a32676ea451941\": container with ID starting with 5d958d97b6e0aa6be6ddb548fdf01b436880e0d114a56d3228a32676ea451941 not found: ID does not exist" containerID="5d958d97b6e0aa6be6ddb548fdf01b436880e0d114a56d3228a32676ea451941"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.667328 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d958d97b6e0aa6be6ddb548fdf01b436880e0d114a56d3228a32676ea451941"} err="failed to get container status \"5d958d97b6e0aa6be6ddb548fdf01b436880e0d114a56d3228a32676ea451941\": rpc error: code = NotFound desc = could not find container \"5d958d97b6e0aa6be6ddb548fdf01b436880e0d114a56d3228a32676ea451941\": container with ID starting with 5d958d97b6e0aa6be6ddb548fdf01b436880e0d114a56d3228a32676ea451941 not found: ID does not exist"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.667383 4792 scope.go:117] "RemoveContainer" containerID="d266e9f24c69b50d36c30b53f7d11a56cbfa6d8c86dc184c9f90b1f844ff513b"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.667689 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d266e9f24c69b50d36c30b53f7d11a56cbfa6d8c86dc184c9f90b1f844ff513b"} err="failed to get container status \"d266e9f24c69b50d36c30b53f7d11a56cbfa6d8c86dc184c9f90b1f844ff513b\": rpc error: code = NotFound desc = could not find container \"d266e9f24c69b50d36c30b53f7d11a56cbfa6d8c86dc184c9f90b1f844ff513b\": container with ID starting with d266e9f24c69b50d36c30b53f7d11a56cbfa6d8c86dc184c9f90b1f844ff513b not found: ID does not exist"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.667726 4792 scope.go:117] "RemoveContainer" containerID="5d958d97b6e0aa6be6ddb548fdf01b436880e0d114a56d3228a32676ea451941"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.668190 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d958d97b6e0aa6be6ddb548fdf01b436880e0d114a56d3228a32676ea451941"} err="failed to get container status \"5d958d97b6e0aa6be6ddb548fdf01b436880e0d114a56d3228a32676ea451941\": rpc error: code = NotFound desc = could not find container \"5d958d97b6e0aa6be6ddb548fdf01b436880e0d114a56d3228a32676ea451941\": container with ID starting with 5d958d97b6e0aa6be6ddb548fdf01b436880e0d114a56d3228a32676ea451941 not found: ID does not exist"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.700022 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b99d2b7-ae25-4088-8cae-a3f6151e735f-config-data\") pod \"cinder-api-0\" (UID: \"2b99d2b7-ae25-4088-8cae-a3f6151e735f\") " pod="openstack/cinder-api-0"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.700069 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2b99d2b7-ae25-4088-8cae-a3f6151e735f-config-data-custom\") pod \"cinder-api-0\" (UID: \"2b99d2b7-ae25-4088-8cae-a3f6151e735f\") " pod="openstack/cinder-api-0"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.700095 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2b99d2b7-ae25-4088-8cae-a3f6151e735f-scripts\") pod \"cinder-api-0\" (UID: \"2b99d2b7-ae25-4088-8cae-a3f6151e735f\") " pod="openstack/cinder-api-0"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.700193 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6zz9r\" (UniqueName: \"kubernetes.io/projected/2b99d2b7-ae25-4088-8cae-a3f6151e735f-kube-api-access-6zz9r\") pod \"cinder-api-0\" (UID: \"2b99d2b7-ae25-4088-8cae-a3f6151e735f\") " pod="openstack/cinder-api-0"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.700215 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2b99d2b7-ae25-4088-8cae-a3f6151e735f-logs\") pod \"cinder-api-0\" (UID: \"2b99d2b7-ae25-4088-8cae-a3f6151e735f\") " pod="openstack/cinder-api-0"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.700234 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b99d2b7-ae25-4088-8cae-a3f6151e735f-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"2b99d2b7-ae25-4088-8cae-a3f6151e735f\") " pod="openstack/cinder-api-0"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.700459 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b99d2b7-ae25-4088-8cae-a3f6151e735f-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"2b99d2b7-ae25-4088-8cae-a3f6151e735f\") " pod="openstack/cinder-api-0"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.700647 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2b99d2b7-ae25-4088-8cae-a3f6151e735f-etc-machine-id\") pod \"cinder-api-0\" (UID: \"2b99d2b7-ae25-4088-8cae-a3f6151e735f\") " pod="openstack/cinder-api-0"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.700829 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b99d2b7-ae25-4088-8cae-a3f6151e735f-public-tls-certs\") pod \"cinder-api-0\" (UID: \"2b99d2b7-ae25-4088-8cae-a3f6151e735f\") " pod="openstack/cinder-api-0"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.802907 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b99d2b7-ae25-4088-8cae-a3f6151e735f-public-tls-certs\") pod \"cinder-api-0\" (UID: \"2b99d2b7-ae25-4088-8cae-a3f6151e735f\") " pod="openstack/cinder-api-0"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.803304 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b99d2b7-ae25-4088-8cae-a3f6151e735f-config-data\") pod \"cinder-api-0\" (UID: \"2b99d2b7-ae25-4088-8cae-a3f6151e735f\") " pod="openstack/cinder-api-0"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.803360 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2b99d2b7-ae25-4088-8cae-a3f6151e735f-config-data-custom\") pod \"cinder-api-0\" (UID: \"2b99d2b7-ae25-4088-8cae-a3f6151e735f\") " pod="openstack/cinder-api-0"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.803400 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2b99d2b7-ae25-4088-8cae-a3f6151e735f-scripts\") pod \"cinder-api-0\" (UID: \"2b99d2b7-ae25-4088-8cae-a3f6151e735f\") " pod="openstack/cinder-api-0"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.803539 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6zz9r\" (UniqueName: \"kubernetes.io/projected/2b99d2b7-ae25-4088-8cae-a3f6151e735f-kube-api-access-6zz9r\") pod \"cinder-api-0\" (UID: \"2b99d2b7-ae25-4088-8cae-a3f6151e735f\") " pod="openstack/cinder-api-0"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.803601 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2b99d2b7-ae25-4088-8cae-a3f6151e735f-logs\") pod \"cinder-api-0\" (UID: \"2b99d2b7-ae25-4088-8cae-a3f6151e735f\") " pod="openstack/cinder-api-0"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.803635 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b99d2b7-ae25-4088-8cae-a3f6151e735f-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"2b99d2b7-ae25-4088-8cae-a3f6151e735f\") " pod="openstack/cinder-api-0"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.803720 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b99d2b7-ae25-4088-8cae-a3f6151e735f-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"2b99d2b7-ae25-4088-8cae-a3f6151e735f\") " pod="openstack/cinder-api-0"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.803780 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2b99d2b7-ae25-4088-8cae-a3f6151e735f-etc-machine-id\") pod \"cinder-api-0\" (UID: \"2b99d2b7-ae25-4088-8cae-a3f6151e735f\") " pod="openstack/cinder-api-0"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.803927 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2b99d2b7-ae25-4088-8cae-a3f6151e735f-etc-machine-id\") pod \"cinder-api-0\" (UID: \"2b99d2b7-ae25-4088-8cae-a3f6151e735f\") " pod="openstack/cinder-api-0"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.803993 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2b99d2b7-ae25-4088-8cae-a3f6151e735f-logs\") pod \"cinder-api-0\" (UID: \"2b99d2b7-ae25-4088-8cae-a3f6151e735f\") " pod="openstack/cinder-api-0"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.807095 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b99d2b7-ae25-4088-8cae-a3f6151e735f-public-tls-certs\") pod \"cinder-api-0\" (UID: \"2b99d2b7-ae25-4088-8cae-a3f6151e735f\") " pod="openstack/cinder-api-0"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.807608 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2b99d2b7-ae25-4088-8cae-a3f6151e735f-config-data-custom\") pod \"cinder-api-0\" (UID: \"2b99d2b7-ae25-4088-8cae-a3f6151e735f\") " pod="openstack/cinder-api-0"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.812171 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b99d2b7-ae25-4088-8cae-a3f6151e735f-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"2b99d2b7-ae25-4088-8cae-a3f6151e735f\") " pod="openstack/cinder-api-0"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.812301 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b99d2b7-ae25-4088-8cae-a3f6151e735f-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"2b99d2b7-ae25-4088-8cae-a3f6151e735f\") " pod="openstack/cinder-api-0"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.813955 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2b99d2b7-ae25-4088-8cae-a3f6151e735f-scripts\") pod \"cinder-api-0\" (UID: \"2b99d2b7-ae25-4088-8cae-a3f6151e735f\") " pod="openstack/cinder-api-0"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.827260 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6zz9r\" (UniqueName: \"kubernetes.io/projected/2b99d2b7-ae25-4088-8cae-a3f6151e735f-kube-api-access-6zz9r\") pod \"cinder-api-0\" (UID: \"2b99d2b7-ae25-4088-8cae-a3f6151e735f\") " pod="openstack/cinder-api-0"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.838726 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b99d2b7-ae25-4088-8cae-a3f6151e735f-config-data\") pod \"cinder-api-0\" (UID: \"2b99d2b7-ae25-4088-8cae-a3f6151e735f\") " pod="openstack/cinder-api-0"
Dec 02 18:59:02 crc kubenswrapper[4792]: I1202 18:59:02.976074 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Dec 02 18:59:03 crc kubenswrapper[4792]: I1202 18:59:03.519640 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a165cea6-0a2d-4386-8db8-aaf30d21d213","Type":"ContainerStarted","Data":"e2cd29cd81de34e9c4f059737f1aa19ffc896f08f746b414f5b36d4d29bf74f0"}
Dec 02 18:59:03 crc kubenswrapper[4792]: I1202 18:59:03.557180 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2095942a-5571-47db-b97f-1b3611ad17ae" path="/var/lib/kubelet/pods/2095942a-5571-47db-b97f-1b3611ad17ae/volumes"
Dec 02 18:59:03 crc kubenswrapper[4792]: I1202 18:59:03.559400 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Dec 02 18:59:03 crc kubenswrapper[4792]: W1202 18:59:03.567797 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2b99d2b7_ae25_4088_8cae_a3f6151e735f.slice/crio-22990157176fcbe5f6e646a761b465613a595721e2a261f8240a127557c426a0 WatchSource:0}: Error finding container 22990157176fcbe5f6e646a761b465613a595721e2a261f8240a127557c426a0: Status 404 returned error can't find the container with id 22990157176fcbe5f6e646a761b465613a595721e2a261f8240a127557c426a0
Dec 02 18:59:03 crc kubenswrapper[4792]: I1202 18:59:03.982619 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-sync-wh2hr"
Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.137928 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3df2077b-8a01-47ae-ad22-abfc02071c24-combined-ca-bundle\") pod \"3df2077b-8a01-47ae-ad22-abfc02071c24\" (UID: \"3df2077b-8a01-47ae-ad22-abfc02071c24\") "
Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.138210 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-km57l\" (UniqueName: \"kubernetes.io/projected/3df2077b-8a01-47ae-ad22-abfc02071c24-kube-api-access-km57l\") pod \"3df2077b-8a01-47ae-ad22-abfc02071c24\" (UID: \"3df2077b-8a01-47ae-ad22-abfc02071c24\") "
Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.138285 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/3df2077b-8a01-47ae-ad22-abfc02071c24-certs\") pod \"3df2077b-8a01-47ae-ad22-abfc02071c24\" (UID: \"3df2077b-8a01-47ae-ad22-abfc02071c24\") "
Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.138409 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3df2077b-8a01-47ae-ad22-abfc02071c24-config-data\") pod \"3df2077b-8a01-47ae-ad22-abfc02071c24\" (UID: \"3df2077b-8a01-47ae-ad22-abfc02071c24\") "
Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.138451 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3df2077b-8a01-47ae-ad22-abfc02071c24-scripts\") pod \"3df2077b-8a01-47ae-ad22-abfc02071c24\" (UID: \"3df2077b-8a01-47ae-ad22-abfc02071c24\") "
Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.213369 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3df2077b-8a01-47ae-ad22-abfc02071c24-scripts" (OuterVolumeSpecName: "scripts") pod "3df2077b-8a01-47ae-ad22-abfc02071c24" (UID: "3df2077b-8a01-47ae-ad22-abfc02071c24"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.213415 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3df2077b-8a01-47ae-ad22-abfc02071c24-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3df2077b-8a01-47ae-ad22-abfc02071c24" (UID: "3df2077b-8a01-47ae-ad22-abfc02071c24"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.213516 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3df2077b-8a01-47ae-ad22-abfc02071c24-kube-api-access-km57l" (OuterVolumeSpecName: "kube-api-access-km57l") pod "3df2077b-8a01-47ae-ad22-abfc02071c24" (UID: "3df2077b-8a01-47ae-ad22-abfc02071c24"). InnerVolumeSpecName "kube-api-access-km57l". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.215194 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3df2077b-8a01-47ae-ad22-abfc02071c24-config-data" (OuterVolumeSpecName: "config-data") pod "3df2077b-8a01-47ae-ad22-abfc02071c24" (UID: "3df2077b-8a01-47ae-ad22-abfc02071c24"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.216390 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3df2077b-8a01-47ae-ad22-abfc02071c24-certs" (OuterVolumeSpecName: "certs") pod "3df2077b-8a01-47ae-ad22-abfc02071c24" (UID: "3df2077b-8a01-47ae-ad22-abfc02071c24"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.240606 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3df2077b-8a01-47ae-ad22-abfc02071c24-config-data\") on node \"crc\" DevicePath \"\""
Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.240640 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3df2077b-8a01-47ae-ad22-abfc02071c24-scripts\") on node \"crc\" DevicePath \"\""
Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.240662 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3df2077b-8a01-47ae-ad22-abfc02071c24-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.240672 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-km57l\" (UniqueName: \"kubernetes.io/projected/3df2077b-8a01-47ae-ad22-abfc02071c24-kube-api-access-km57l\") on node \"crc\" DevicePath \"\""
Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.240681 4792 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/3df2077b-8a01-47ae-ad22-abfc02071c24-certs\") on node \"crc\" DevicePath \"\""
Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.541086 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"2b99d2b7-ae25-4088-8cae-a3f6151e735f","Type":"ContainerStarted","Data":"5a5ecbfcb0639b6cc64c84bb7821791c8178df939a3a07455d3a0a17c8fe4156"}
Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.541400 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"2b99d2b7-ae25-4088-8cae-a3f6151e735f","Type":"ContainerStarted","Data":"22990157176fcbe5f6e646a761b465613a595721e2a261f8240a127557c426a0"}
Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.549953 4792 generic.go:334] "Generic (PLEG): container finished" podID="4e8170b3-9772-49c8-af59-87f59120f79e" containerID="4300cdeec8594eb1591851d81d17471828b13074e67a7bdcb89053867bc4d3b4" exitCode=0
Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.550002 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dbdbd8fdb-qfn5f" event={"ID":"4e8170b3-9772-49c8-af59-87f59120f79e","Type":"ContainerDied","Data":"4300cdeec8594eb1591851d81d17471828b13074e67a7bdcb89053867bc4d3b4"}
Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.555925 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-wh2hr" event={"ID":"3df2077b-8a01-47ae-ad22-abfc02071c24","Type":"ContainerDied","Data":"48f282c063107b45771188d8b610cc5864a27c291988453d1d2346276a04253a"}
Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.555951 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="48f282c063107b45771188d8b610cc5864a27c291988453d1d2346276a04253a"
Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.556001 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-sync-wh2hr"
Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.562557 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a165cea6-0a2d-4386-8db8-aaf30d21d213","Type":"ContainerStarted","Data":"0d7bf3e3cca871b6aa15ce858d7e613f1823c41d0ac60b552f6e26eae853b127"}
Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.563259 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.677320 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.333102876 podStartE2EDuration="5.677289569s" podCreationTimestamp="2025-12-02 18:58:59 +0000 UTC" firstStartedPulling="2025-12-02 18:59:00.564362726 +0000 UTC m=+1371.337255054" lastFinishedPulling="2025-12-02 18:59:03.908549419 +0000 UTC m=+1374.681441747" observedRunningTime="2025-12-02 18:59:04.591977598 +0000 UTC m=+1375.364869926" watchObservedRunningTime="2025-12-02 18:59:04.677289569 +0000 UTC m=+1375.450181897"
Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.677808 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-storageinit-sj984"]
Dec 02 18:59:04 crc kubenswrapper[4792]: E1202 18:59:04.678208 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3df2077b-8a01-47ae-ad22-abfc02071c24" containerName="cloudkitty-db-sync"
Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.678224 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="3df2077b-8a01-47ae-ad22-abfc02071c24" containerName="cloudkitty-db-sync"
Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.678419 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="3df2077b-8a01-47ae-ad22-abfc02071c24" containerName="cloudkitty-db-sync"
Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.679280 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-storageinit-sj984"
Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.683427 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret"
Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.683449 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-scripts"
Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.685190 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-cloudkitty-dockercfg-qndk8"
Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.686509 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-config-data"
Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.686752 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-client-internal"
Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.699697 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-storageinit-sj984"]
Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.797172 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-75d777bcc8-l485p"
Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.854905 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k58zv\" (UniqueName: \"kubernetes.io/projected/091d90c2-7a16-4474-8f4e-981342297fac-kube-api-access-k58zv\") pod \"cloudkitty-storageinit-sj984\" (UID: \"091d90c2-7a16-4474-8f4e-981342297fac\") " pod="openstack/cloudkitty-storageinit-sj984"
Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.855039 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/091d90c2-7a16-4474-8f4e-981342297fac-scripts\") pod \"cloudkitty-storageinit-sj984\" (UID: \"091d90c2-7a16-4474-8f4e-981342297fac\") " pod="openstack/cloudkitty-storageinit-sj984"
Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.855112 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/091d90c2-7a16-4474-8f4e-981342297fac-config-data\") pod \"cloudkitty-storageinit-sj984\" (UID: \"091d90c2-7a16-4474-8f4e-981342297fac\") " pod="openstack/cloudkitty-storageinit-sj984"
Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.855327 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/091d90c2-7a16-4474-8f4e-981342297fac-certs\") pod \"cloudkitty-storageinit-sj984\" (UID: \"091d90c2-7a16-4474-8f4e-981342297fac\") " pod="openstack/cloudkitty-storageinit-sj984"
Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.855383 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/091d90c2-7a16-4474-8f4e-981342297fac-combined-ca-bundle\") pod \"cloudkitty-storageinit-sj984\" (UID: \"091d90c2-7a16-4474-8f4e-981342297fac\") " pod="openstack/cloudkitty-storageinit-sj984"
Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.936348 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-dbdbd8fdb-qfn5f" Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.957359 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/091d90c2-7a16-4474-8f4e-981342297fac-certs\") pod \"cloudkitty-storageinit-sj984\" (UID: \"091d90c2-7a16-4474-8f4e-981342297fac\") " pod="openstack/cloudkitty-storageinit-sj984" Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.957407 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/091d90c2-7a16-4474-8f4e-981342297fac-combined-ca-bundle\") pod \"cloudkitty-storageinit-sj984\" (UID: \"091d90c2-7a16-4474-8f4e-981342297fac\") " pod="openstack/cloudkitty-storageinit-sj984" Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.957448 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k58zv\" (UniqueName: \"kubernetes.io/projected/091d90c2-7a16-4474-8f4e-981342297fac-kube-api-access-k58zv\") pod \"cloudkitty-storageinit-sj984\" (UID: \"091d90c2-7a16-4474-8f4e-981342297fac\") " pod="openstack/cloudkitty-storageinit-sj984" Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.957535 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/091d90c2-7a16-4474-8f4e-981342297fac-scripts\") pod \"cloudkitty-storageinit-sj984\" (UID: \"091d90c2-7a16-4474-8f4e-981342297fac\") " pod="openstack/cloudkitty-storageinit-sj984" Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.958727 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/091d90c2-7a16-4474-8f4e-981342297fac-config-data\") pod \"cloudkitty-storageinit-sj984\" (UID: \"091d90c2-7a16-4474-8f4e-981342297fac\") " pod="openstack/cloudkitty-storageinit-sj984" Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.963465 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/091d90c2-7a16-4474-8f4e-981342297fac-scripts\") pod \"cloudkitty-storageinit-sj984\" (UID: \"091d90c2-7a16-4474-8f4e-981342297fac\") " pod="openstack/cloudkitty-storageinit-sj984" Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.965366 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/091d90c2-7a16-4474-8f4e-981342297fac-config-data\") pod \"cloudkitty-storageinit-sj984\" (UID: \"091d90c2-7a16-4474-8f4e-981342297fac\") " pod="openstack/cloudkitty-storageinit-sj984" Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.967067 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/091d90c2-7a16-4474-8f4e-981342297fac-certs\") pod \"cloudkitty-storageinit-sj984\" (UID: \"091d90c2-7a16-4474-8f4e-981342297fac\") " pod="openstack/cloudkitty-storageinit-sj984" Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.991430 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k58zv\" (UniqueName: \"kubernetes.io/projected/091d90c2-7a16-4474-8f4e-981342297fac-kube-api-access-k58zv\") pod \"cloudkitty-storageinit-sj984\" (UID: \"091d90c2-7a16-4474-8f4e-981342297fac\") " pod="openstack/cloudkitty-storageinit-sj984" Dec 02 18:59:04 crc kubenswrapper[4792]: I1202 18:59:04.993041 4792 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/091d90c2-7a16-4474-8f4e-981342297fac-combined-ca-bundle\") pod \"cloudkitty-storageinit-sj984\" (UID: \"091d90c2-7a16-4474-8f4e-981342297fac\") " pod="openstack/cloudkitty-storageinit-sj984" Dec 02 18:59:05 crc kubenswrapper[4792]: I1202 18:59:05.013286 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-75d777bcc8-l485p" Dec 02 18:59:05 crc kubenswrapper[4792]: I1202 18:59:05.014438 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-storageinit-sj984" Dec 02 18:59:05 crc kubenswrapper[4792]: I1202 18:59:05.065634 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6qx6x\" (UniqueName: \"kubernetes.io/projected/4e8170b3-9772-49c8-af59-87f59120f79e-kube-api-access-6qx6x\") pod \"4e8170b3-9772-49c8-af59-87f59120f79e\" (UID: \"4e8170b3-9772-49c8-af59-87f59120f79e\") " Dec 02 18:59:05 crc kubenswrapper[4792]: I1202 18:59:05.065806 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/4e8170b3-9772-49c8-af59-87f59120f79e-config\") pod \"4e8170b3-9772-49c8-af59-87f59120f79e\" (UID: \"4e8170b3-9772-49c8-af59-87f59120f79e\") " Dec 02 18:59:05 crc kubenswrapper[4792]: I1202 18:59:05.065895 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e8170b3-9772-49c8-af59-87f59120f79e-combined-ca-bundle\") pod \"4e8170b3-9772-49c8-af59-87f59120f79e\" (UID: \"4e8170b3-9772-49c8-af59-87f59120f79e\") " Dec 02 18:59:05 crc kubenswrapper[4792]: I1202 18:59:05.065987 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e8170b3-9772-49c8-af59-87f59120f79e-ovndb-tls-certs\") pod \"4e8170b3-9772-49c8-af59-87f59120f79e\" (UID: \"4e8170b3-9772-49c8-af59-87f59120f79e\") " Dec 02 18:59:05 crc kubenswrapper[4792]: I1202 18:59:05.066019 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/4e8170b3-9772-49c8-af59-87f59120f79e-httpd-config\") pod \"4e8170b3-9772-49c8-af59-87f59120f79e\" (UID: \"4e8170b3-9772-49c8-af59-87f59120f79e\") " Dec 02 18:59:05 crc kubenswrapper[4792]: I1202 18:59:05.077351 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e8170b3-9772-49c8-af59-87f59120f79e-kube-api-access-6qx6x" (OuterVolumeSpecName: "kube-api-access-6qx6x") pod "4e8170b3-9772-49c8-af59-87f59120f79e" (UID: "4e8170b3-9772-49c8-af59-87f59120f79e"). InnerVolumeSpecName "kube-api-access-6qx6x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:59:05 crc kubenswrapper[4792]: I1202 18:59:05.077545 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e8170b3-9772-49c8-af59-87f59120f79e-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "4e8170b3-9772-49c8-af59-87f59120f79e" (UID: "4e8170b3-9772-49c8-af59-87f59120f79e"). InnerVolumeSpecName "httpd-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:59:05 crc kubenswrapper[4792]: I1202 18:59:05.113152 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-6f7549b9fd-4lplx"] Dec 02 18:59:05 crc kubenswrapper[4792]: I1202 18:59:05.113354 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-6f7549b9fd-4lplx" podUID="d7d46db5-e66e-435c-a7dd-c5f8fef782ed" containerName="barbican-api-log" containerID="cri-o://1bf757f8b2bb9bf3c37111cdab1a1636c3770eb057a7b7552da4b2de241868df" gracePeriod=30 Dec 02 18:59:05 crc kubenswrapper[4792]: I1202 18:59:05.114637 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-6f7549b9fd-4lplx" podUID="d7d46db5-e66e-435c-a7dd-c5f8fef782ed" containerName="barbican-api" containerID="cri-o://577fb68ddf3e7f340fcdfa2a0caffec048e6c09a356e2dbb1a5cd4c3ea1ab179" gracePeriod=30 Dec 02 18:59:05 crc kubenswrapper[4792]: I1202 18:59:05.168485 4792 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/4e8170b3-9772-49c8-af59-87f59120f79e-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:05 crc kubenswrapper[4792]: I1202 18:59:05.168749 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6qx6x\" (UniqueName: \"kubernetes.io/projected/4e8170b3-9772-49c8-af59-87f59120f79e-kube-api-access-6qx6x\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:05 crc kubenswrapper[4792]: I1202 18:59:05.186727 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e8170b3-9772-49c8-af59-87f59120f79e-config" (OuterVolumeSpecName: "config") pod "4e8170b3-9772-49c8-af59-87f59120f79e" (UID: "4e8170b3-9772-49c8-af59-87f59120f79e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:59:05 crc kubenswrapper[4792]: I1202 18:59:05.188429 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e8170b3-9772-49c8-af59-87f59120f79e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4e8170b3-9772-49c8-af59-87f59120f79e" (UID: "4e8170b3-9772-49c8-af59-87f59120f79e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:59:05 crc kubenswrapper[4792]: I1202 18:59:05.231481 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e8170b3-9772-49c8-af59-87f59120f79e-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "4e8170b3-9772-49c8-af59-87f59120f79e" (UID: "4e8170b3-9772-49c8-af59-87f59120f79e"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:59:05 crc kubenswrapper[4792]: I1202 18:59:05.272213 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/4e8170b3-9772-49c8-af59-87f59120f79e-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:05 crc kubenswrapper[4792]: I1202 18:59:05.272248 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e8170b3-9772-49c8-af59-87f59120f79e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:05 crc kubenswrapper[4792]: I1202 18:59:05.272265 4792 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e8170b3-9772-49c8-af59-87f59120f79e-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:05 crc kubenswrapper[4792]: W1202 18:59:05.530077 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod091d90c2_7a16_4474_8f4e_981342297fac.slice/crio-7875fe5efec8cd5ac2c4e38aba12bcd76a5f2d5af007c816bb7bafcf7b29395f WatchSource:0}: Error finding container 7875fe5efec8cd5ac2c4e38aba12bcd76a5f2d5af007c816bb7bafcf7b29395f: Status 404 returned error can't find the container with id 7875fe5efec8cd5ac2c4e38aba12bcd76a5f2d5af007c816bb7bafcf7b29395f Dec 02 18:59:05 crc kubenswrapper[4792]: I1202 18:59:05.532969 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-storageinit-sj984"] Dec 02 18:59:05 crc kubenswrapper[4792]: I1202 18:59:05.580274 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"2b99d2b7-ae25-4088-8cae-a3f6151e735f","Type":"ContainerStarted","Data":"f4ce1048b4b8f9ee69e4d855b5bd184443c1df33778fb0d8d00e44a4ed54c140"} Dec 02 18:59:05 crc kubenswrapper[4792]: I1202 18:59:05.581664 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 02 18:59:05 crc kubenswrapper[4792]: I1202 18:59:05.584904 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-dbdbd8fdb-qfn5f" Dec 02 18:59:05 crc kubenswrapper[4792]: I1202 18:59:05.584900 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dbdbd8fdb-qfn5f" event={"ID":"4e8170b3-9772-49c8-af59-87f59120f79e","Type":"ContainerDied","Data":"61422a3266f50940e7054734aa641517271a0c79ea252bc5535ff1204b79aa6f"} Dec 02 18:59:05 crc kubenswrapper[4792]: I1202 18:59:05.585146 4792 scope.go:117] "RemoveContainer" containerID="2a1bf3cca136c798737aa7c278cec751d646cc89e57e234d02cfd751a9034823" Dec 02 18:59:05 crc kubenswrapper[4792]: I1202 18:59:05.588565 4792 generic.go:334] "Generic (PLEG): container finished" podID="d7d46db5-e66e-435c-a7dd-c5f8fef782ed" containerID="1bf757f8b2bb9bf3c37111cdab1a1636c3770eb057a7b7552da4b2de241868df" exitCode=143 Dec 02 18:59:05 crc kubenswrapper[4792]: I1202 18:59:05.588626 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6f7549b9fd-4lplx" event={"ID":"d7d46db5-e66e-435c-a7dd-c5f8fef782ed","Type":"ContainerDied","Data":"1bf757f8b2bb9bf3c37111cdab1a1636c3770eb057a7b7552da4b2de241868df"} Dec 02 18:59:05 crc kubenswrapper[4792]: I1202 18:59:05.596010 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-sj984" event={"ID":"091d90c2-7a16-4474-8f4e-981342297fac","Type":"ContainerStarted","Data":"7875fe5efec8cd5ac2c4e38aba12bcd76a5f2d5af007c816bb7bafcf7b29395f"} Dec 02 18:59:05 crc kubenswrapper[4792]: I1202 18:59:05.614319 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.6143025189999998 podStartE2EDuration="3.614302519s" podCreationTimestamp="2025-12-02 18:59:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:59:05.607554014 +0000 UTC m=+1376.380446362" watchObservedRunningTime="2025-12-02 18:59:05.614302519 +0000 UTC m=+1376.387194847" Dec 02 18:59:05 crc kubenswrapper[4792]: I1202 18:59:05.719759 4792 scope.go:117] "RemoveContainer" containerID="4300cdeec8594eb1591851d81d17471828b13074e67a7bdcb89053867bc4d3b4" Dec 02 18:59:05 crc kubenswrapper[4792]: I1202 18:59:05.722411 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-dbdbd8fdb-qfn5f"] Dec 02 18:59:05 crc kubenswrapper[4792]: I1202 18:59:05.749312 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-dbdbd8fdb-qfn5f"] Dec 02 18:59:06 crc kubenswrapper[4792]: I1202 18:59:06.608196 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-sj984" event={"ID":"091d90c2-7a16-4474-8f4e-981342297fac","Type":"ContainerStarted","Data":"4549abf865afee3fd2aee942b4a1195a222f2f47a9607b87926af8e8f824dd3d"} Dec 02 18:59:06 crc kubenswrapper[4792]: I1202 18:59:06.630233 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-storageinit-sj984" podStartSLOduration=2.630214482 podStartE2EDuration="2.630214482s" podCreationTimestamp="2025-12-02 18:59:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:59:06.62202712 +0000 UTC m=+1377.394919448" watchObservedRunningTime="2025-12-02 18:59:06.630214482 +0000 UTC m=+1377.403106810" Dec 02 18:59:07 crc kubenswrapper[4792]: I1202 18:59:07.402010 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 
02 18:59:07 crc kubenswrapper[4792]: I1202 18:59:07.462284 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 02 18:59:07 crc kubenswrapper[4792]: I1202 18:59:07.518827 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5c9776ccc5-g5f72" Dec 02 18:59:07 crc kubenswrapper[4792]: I1202 18:59:07.562994 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e8170b3-9772-49c8-af59-87f59120f79e" path="/var/lib/kubelet/pods/4e8170b3-9772-49c8-af59-87f59120f79e/volumes" Dec 02 18:59:07 crc kubenswrapper[4792]: I1202 18:59:07.612577 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-thpg5"] Dec 02 18:59:07 crc kubenswrapper[4792]: I1202 18:59:07.613887 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-85ff748b95-thpg5" podUID="c82ec48d-c75a-4989-8f7d-465a3ce22987" containerName="dnsmasq-dns" containerID="cri-o://4ebd4c5eae3cd1008c5faeddf802cd394ee1b79c5a302fcf65b326a67908921e" gracePeriod=10 Dec 02 18:59:07 crc kubenswrapper[4792]: I1202 18:59:07.621815 4792 generic.go:334] "Generic (PLEG): container finished" podID="091d90c2-7a16-4474-8f4e-981342297fac" containerID="4549abf865afee3fd2aee942b4a1195a222f2f47a9607b87926af8e8f824dd3d" exitCode=0 Dec 02 18:59:07 crc kubenswrapper[4792]: I1202 18:59:07.622043 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="77344f11-ce8b-4b69-b7c8-89492c0ae045" containerName="cinder-scheduler" containerID="cri-o://ca783563b855034fd155cc28b1e580cd02169610f8b95c508cc9d26cbd6ccd83" gracePeriod=30 Dec 02 18:59:07 crc kubenswrapper[4792]: I1202 18:59:07.622194 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-sj984" event={"ID":"091d90c2-7a16-4474-8f4e-981342297fac","Type":"ContainerDied","Data":"4549abf865afee3fd2aee942b4a1195a222f2f47a9607b87926af8e8f824dd3d"} Dec 02 18:59:07 crc kubenswrapper[4792]: I1202 18:59:07.622964 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="77344f11-ce8b-4b69-b7c8-89492c0ae045" containerName="probe" containerID="cri-o://b9916f2a5501389ec630d168caa306ff10053da7b7757b3119501cd7f351177c" gracePeriod=30 Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.081619 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.081893 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.081928 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.082666 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"dc04fc6a49e6cf5090b01aa7a72bef9189df42704e3c79ebf0699d5a961190bd"} pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.082712 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" containerID="cri-o://dc04fc6a49e6cf5090b01aa7a72bef9189df42704e3c79ebf0699d5a961190bd" gracePeriod=600 Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.176352 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-thpg5" Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.333164 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n954t\" (UniqueName: \"kubernetes.io/projected/c82ec48d-c75a-4989-8f7d-465a3ce22987-kube-api-access-n954t\") pod \"c82ec48d-c75a-4989-8f7d-465a3ce22987\" (UID: \"c82ec48d-c75a-4989-8f7d-465a3ce22987\") " Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.333242 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c82ec48d-c75a-4989-8f7d-465a3ce22987-config\") pod \"c82ec48d-c75a-4989-8f7d-465a3ce22987\" (UID: \"c82ec48d-c75a-4989-8f7d-465a3ce22987\") " Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.333274 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c82ec48d-c75a-4989-8f7d-465a3ce22987-dns-svc\") pod \"c82ec48d-c75a-4989-8f7d-465a3ce22987\" (UID: \"c82ec48d-c75a-4989-8f7d-465a3ce22987\") " Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.333351 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c82ec48d-c75a-4989-8f7d-465a3ce22987-ovsdbserver-sb\") pod \"c82ec48d-c75a-4989-8f7d-465a3ce22987\" (UID: \"c82ec48d-c75a-4989-8f7d-465a3ce22987\") " Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.333413 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c82ec48d-c75a-4989-8f7d-465a3ce22987-ovsdbserver-nb\") pod \"c82ec48d-c75a-4989-8f7d-465a3ce22987\" (UID: \"c82ec48d-c75a-4989-8f7d-465a3ce22987\") " Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.333470 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c82ec48d-c75a-4989-8f7d-465a3ce22987-dns-swift-storage-0\") pod \"c82ec48d-c75a-4989-8f7d-465a3ce22987\" (UID: \"c82ec48d-c75a-4989-8f7d-465a3ce22987\") " Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.359901 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c82ec48d-c75a-4989-8f7d-465a3ce22987-kube-api-access-n954t" (OuterVolumeSpecName: "kube-api-access-n954t") pod "c82ec48d-c75a-4989-8f7d-465a3ce22987" (UID: "c82ec48d-c75a-4989-8f7d-465a3ce22987"). InnerVolumeSpecName "kube-api-access-n954t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.389581 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c82ec48d-c75a-4989-8f7d-465a3ce22987-config" (OuterVolumeSpecName: "config") pod "c82ec48d-c75a-4989-8f7d-465a3ce22987" (UID: "c82ec48d-c75a-4989-8f7d-465a3ce22987"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.396547 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c82ec48d-c75a-4989-8f7d-465a3ce22987-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "c82ec48d-c75a-4989-8f7d-465a3ce22987" (UID: "c82ec48d-c75a-4989-8f7d-465a3ce22987"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.407011 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c82ec48d-c75a-4989-8f7d-465a3ce22987-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c82ec48d-c75a-4989-8f7d-465a3ce22987" (UID: "c82ec48d-c75a-4989-8f7d-465a3ce22987"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.413893 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c82ec48d-c75a-4989-8f7d-465a3ce22987-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c82ec48d-c75a-4989-8f7d-465a3ce22987" (UID: "c82ec48d-c75a-4989-8f7d-465a3ce22987"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.433223 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c82ec48d-c75a-4989-8f7d-465a3ce22987-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c82ec48d-c75a-4989-8f7d-465a3ce22987" (UID: "c82ec48d-c75a-4989-8f7d-465a3ce22987"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.438435 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n954t\" (UniqueName: \"kubernetes.io/projected/c82ec48d-c75a-4989-8f7d-465a3ce22987-kube-api-access-n954t\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.438459 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c82ec48d-c75a-4989-8f7d-465a3ce22987-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.438468 4792 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c82ec48d-c75a-4989-8f7d-465a3ce22987-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.438477 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c82ec48d-c75a-4989-8f7d-465a3ce22987-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.438485 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c82ec48d-c75a-4989-8f7d-465a3ce22987-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.438494 4792 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c82ec48d-c75a-4989-8f7d-465a3ce22987-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.655061 4792 generic.go:334] "Generic (PLEG): container finished" podID="d7d46db5-e66e-435c-a7dd-c5f8fef782ed" containerID="577fb68ddf3e7f340fcdfa2a0caffec048e6c09a356e2dbb1a5cd4c3ea1ab179" exitCode=0 Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.655165 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6f7549b9fd-4lplx" event={"ID":"d7d46db5-e66e-435c-a7dd-c5f8fef782ed","Type":"ContainerDied","Data":"577fb68ddf3e7f340fcdfa2a0caffec048e6c09a356e2dbb1a5cd4c3ea1ab179"} Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.659536 4792 generic.go:334] "Generic (PLEG): container finished" podID="c82ec48d-c75a-4989-8f7d-465a3ce22987" containerID="4ebd4c5eae3cd1008c5faeddf802cd394ee1b79c5a302fcf65b326a67908921e" exitCode=0 Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.659627 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-thpg5" Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.660128 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-thpg5" event={"ID":"c82ec48d-c75a-4989-8f7d-465a3ce22987","Type":"ContainerDied","Data":"4ebd4c5eae3cd1008c5faeddf802cd394ee1b79c5a302fcf65b326a67908921e"} Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.660165 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-thpg5" event={"ID":"c82ec48d-c75a-4989-8f7d-465a3ce22987","Type":"ContainerDied","Data":"68f1531d33561cafb66e6a6d643bf2d5bfdb491c5830798fefea0277bb7ff7b6"} Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.660181 4792 scope.go:117] "RemoveContainer" containerID="4ebd4c5eae3cd1008c5faeddf802cd394ee1b79c5a302fcf65b326a67908921e" Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.670806 4792 generic.go:334] "Generic (PLEG): container finished" podID="77344f11-ce8b-4b69-b7c8-89492c0ae045" containerID="b9916f2a5501389ec630d168caa306ff10053da7b7757b3119501cd7f351177c" exitCode=0 Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.670856 4792 generic.go:334] "Generic (PLEG): container finished" podID="77344f11-ce8b-4b69-b7c8-89492c0ae045" containerID="ca783563b855034fd155cc28b1e580cd02169610f8b95c508cc9d26cbd6ccd83" exitCode=0 Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.670976 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"77344f11-ce8b-4b69-b7c8-89492c0ae045","Type":"ContainerDied","Data":"b9916f2a5501389ec630d168caa306ff10053da7b7757b3119501cd7f351177c"} Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.671023 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"77344f11-ce8b-4b69-b7c8-89492c0ae045","Type":"ContainerDied","Data":"ca783563b855034fd155cc28b1e580cd02169610f8b95c508cc9d26cbd6ccd83"} Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.683127 4792 generic.go:334] "Generic (PLEG): container finished" podID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerID="dc04fc6a49e6cf5090b01aa7a72bef9189df42704e3c79ebf0699d5a961190bd" exitCode=0 Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.683308 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" event={"ID":"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f","Type":"ContainerDied","Data":"dc04fc6a49e6cf5090b01aa7a72bef9189df42704e3c79ebf0699d5a961190bd"} Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.683334 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" event={"ID":"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f","Type":"ContainerStarted","Data":"1bd3680d8cabbe09313bc8e9bfcfc7ae4c8aef120bcf8851a5536ed5b3144523"} Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.700216 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6f7549b9fd-4lplx" Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.702850 4792 scope.go:117] "RemoveContainer" containerID="d0c0b241965293ee1fd652dca3b8c24989f6b4924891f7035754fc8cd3815f6b" Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.708230 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-thpg5"] Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.722040 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-thpg5"] Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.855283 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d7d46db5-e66e-435c-a7dd-c5f8fef782ed-logs\") pod \"d7d46db5-e66e-435c-a7dd-c5f8fef782ed\" (UID: \"d7d46db5-e66e-435c-a7dd-c5f8fef782ed\") " Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.855384 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nwz92\" (UniqueName: \"kubernetes.io/projected/d7d46db5-e66e-435c-a7dd-c5f8fef782ed-kube-api-access-nwz92\") pod \"d7d46db5-e66e-435c-a7dd-c5f8fef782ed\" (UID: \"d7d46db5-e66e-435c-a7dd-c5f8fef782ed\") " Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.855413 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7d46db5-e66e-435c-a7dd-c5f8fef782ed-config-data\") pod \"d7d46db5-e66e-435c-a7dd-c5f8fef782ed\" (UID: \"d7d46db5-e66e-435c-a7dd-c5f8fef782ed\") " Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.855460 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7d46db5-e66e-435c-a7dd-c5f8fef782ed-combined-ca-bundle\") pod \"d7d46db5-e66e-435c-a7dd-c5f8fef782ed\" (UID: \"d7d46db5-e66e-435c-a7dd-c5f8fef782ed\") " Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.855564 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d7d46db5-e66e-435c-a7dd-c5f8fef782ed-config-data-custom\") pod \"d7d46db5-e66e-435c-a7dd-c5f8fef782ed\" (UID: \"d7d46db5-e66e-435c-a7dd-c5f8fef782ed\") " Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.858882 4792 scope.go:117] "RemoveContainer" containerID="4ebd4c5eae3cd1008c5faeddf802cd394ee1b79c5a302fcf65b326a67908921e" Dec 02 18:59:08 crc kubenswrapper[4792]: E1202 18:59:08.859652 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4ebd4c5eae3cd1008c5faeddf802cd394ee1b79c5a302fcf65b326a67908921e\": container with ID starting with 4ebd4c5eae3cd1008c5faeddf802cd394ee1b79c5a302fcf65b326a67908921e not found: ID does not exist" containerID="4ebd4c5eae3cd1008c5faeddf802cd394ee1b79c5a302fcf65b326a67908921e" Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.859676 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ebd4c5eae3cd1008c5faeddf802cd394ee1b79c5a302fcf65b326a67908921e"} err="failed to get container status \"4ebd4c5eae3cd1008c5faeddf802cd394ee1b79c5a302fcf65b326a67908921e\": rpc error: code = NotFound desc = could not find container \"4ebd4c5eae3cd1008c5faeddf802cd394ee1b79c5a302fcf65b326a67908921e\": container with ID starting with 4ebd4c5eae3cd1008c5faeddf802cd394ee1b79c5a302fcf65b326a67908921e not 
found: ID does not exist" Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.859694 4792 scope.go:117] "RemoveContainer" containerID="d0c0b241965293ee1fd652dca3b8c24989f6b4924891f7035754fc8cd3815f6b" Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.861680 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7d46db5-e66e-435c-a7dd-c5f8fef782ed-kube-api-access-nwz92" (OuterVolumeSpecName: "kube-api-access-nwz92") pod "d7d46db5-e66e-435c-a7dd-c5f8fef782ed" (UID: "d7d46db5-e66e-435c-a7dd-c5f8fef782ed"). InnerVolumeSpecName "kube-api-access-nwz92". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.862386 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d7d46db5-e66e-435c-a7dd-c5f8fef782ed-logs" (OuterVolumeSpecName: "logs") pod "d7d46db5-e66e-435c-a7dd-c5f8fef782ed" (UID: "d7d46db5-e66e-435c-a7dd-c5f8fef782ed"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:59:08 crc kubenswrapper[4792]: E1202 18:59:08.862589 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d0c0b241965293ee1fd652dca3b8c24989f6b4924891f7035754fc8cd3815f6b\": container with ID starting with d0c0b241965293ee1fd652dca3b8c24989f6b4924891f7035754fc8cd3815f6b not found: ID does not exist" containerID="d0c0b241965293ee1fd652dca3b8c24989f6b4924891f7035754fc8cd3815f6b" Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.862632 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0c0b241965293ee1fd652dca3b8c24989f6b4924891f7035754fc8cd3815f6b"} err="failed to get container status \"d0c0b241965293ee1fd652dca3b8c24989f6b4924891f7035754fc8cd3815f6b\": rpc error: code = NotFound desc = could not find container \"d0c0b241965293ee1fd652dca3b8c24989f6b4924891f7035754fc8cd3815f6b\": container with ID starting with d0c0b241965293ee1fd652dca3b8c24989f6b4924891f7035754fc8cd3815f6b not found: ID does not exist" Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.862661 4792 scope.go:117] "RemoveContainer" containerID="38cdfcc4ac221e244e725dd6d1a5012b531f01f9bc318459a440b509a9410447" Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.865437 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7d46db5-e66e-435c-a7dd-c5f8fef782ed-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "d7d46db5-e66e-435c-a7dd-c5f8fef782ed" (UID: "d7d46db5-e66e-435c-a7dd-c5f8fef782ed"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.914857 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7d46db5-e66e-435c-a7dd-c5f8fef782ed-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d7d46db5-e66e-435c-a7dd-c5f8fef782ed" (UID: "d7d46db5-e66e-435c-a7dd-c5f8fef782ed"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.953021 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7d46db5-e66e-435c-a7dd-c5f8fef782ed-config-data" (OuterVolumeSpecName: "config-data") pod "d7d46db5-e66e-435c-a7dd-c5f8fef782ed" (UID: "d7d46db5-e66e-435c-a7dd-c5f8fef782ed"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.966233 4792 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d7d46db5-e66e-435c-a7dd-c5f8fef782ed-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.967571 4792 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d7d46db5-e66e-435c-a7dd-c5f8fef782ed-logs\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.967583 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nwz92\" (UniqueName: \"kubernetes.io/projected/d7d46db5-e66e-435c-a7dd-c5f8fef782ed-kube-api-access-nwz92\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.967595 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7d46db5-e66e-435c-a7dd-c5f8fef782ed-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:08 crc kubenswrapper[4792]: I1202 18:59:08.967603 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7d46db5-e66e-435c-a7dd-c5f8fef782ed-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.045671 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.178859 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/77344f11-ce8b-4b69-b7c8-89492c0ae045-etc-machine-id\") pod \"77344f11-ce8b-4b69-b7c8-89492c0ae045\" (UID: \"77344f11-ce8b-4b69-b7c8-89492c0ae045\") " Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.179045 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77344f11-ce8b-4b69-b7c8-89492c0ae045-combined-ca-bundle\") pod \"77344f11-ce8b-4b69-b7c8-89492c0ae045\" (UID: \"77344f11-ce8b-4b69-b7c8-89492c0ae045\") " Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.179050 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/77344f11-ce8b-4b69-b7c8-89492c0ae045-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "77344f11-ce8b-4b69-b7c8-89492c0ae045" (UID: "77344f11-ce8b-4b69-b7c8-89492c0ae045"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.179103 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wdp5w\" (UniqueName: \"kubernetes.io/projected/77344f11-ce8b-4b69-b7c8-89492c0ae045-kube-api-access-wdp5w\") pod \"77344f11-ce8b-4b69-b7c8-89492c0ae045\" (UID: \"77344f11-ce8b-4b69-b7c8-89492c0ae045\") " Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.179168 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/77344f11-ce8b-4b69-b7c8-89492c0ae045-config-data-custom\") pod \"77344f11-ce8b-4b69-b7c8-89492c0ae045\" (UID: \"77344f11-ce8b-4b69-b7c8-89492c0ae045\") " Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.179356 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/77344f11-ce8b-4b69-b7c8-89492c0ae045-scripts\") pod \"77344f11-ce8b-4b69-b7c8-89492c0ae045\" (UID: \"77344f11-ce8b-4b69-b7c8-89492c0ae045\") " Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.179398 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77344f11-ce8b-4b69-b7c8-89492c0ae045-config-data\") pod \"77344f11-ce8b-4b69-b7c8-89492c0ae045\" (UID: \"77344f11-ce8b-4b69-b7c8-89492c0ae045\") " Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.179928 4792 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/77344f11-ce8b-4b69-b7c8-89492c0ae045-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.183764 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77344f11-ce8b-4b69-b7c8-89492c0ae045-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "77344f11-ce8b-4b69-b7c8-89492c0ae045" (UID: "77344f11-ce8b-4b69-b7c8-89492c0ae045"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.184296 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77344f11-ce8b-4b69-b7c8-89492c0ae045-kube-api-access-wdp5w" (OuterVolumeSpecName: "kube-api-access-wdp5w") pod "77344f11-ce8b-4b69-b7c8-89492c0ae045" (UID: "77344f11-ce8b-4b69-b7c8-89492c0ae045"). InnerVolumeSpecName "kube-api-access-wdp5w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.193912 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77344f11-ce8b-4b69-b7c8-89492c0ae045-scripts" (OuterVolumeSpecName: "scripts") pod "77344f11-ce8b-4b69-b7c8-89492c0ae045" (UID: "77344f11-ce8b-4b69-b7c8-89492c0ae045"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.252036 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-storageinit-sj984" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.290645 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wdp5w\" (UniqueName: \"kubernetes.io/projected/77344f11-ce8b-4b69-b7c8-89492c0ae045-kube-api-access-wdp5w\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.290688 4792 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/77344f11-ce8b-4b69-b7c8-89492c0ae045-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.290702 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/77344f11-ce8b-4b69-b7c8-89492c0ae045-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.297556 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77344f11-ce8b-4b69-b7c8-89492c0ae045-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "77344f11-ce8b-4b69-b7c8-89492c0ae045" (UID: "77344f11-ce8b-4b69-b7c8-89492c0ae045"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.321036 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77344f11-ce8b-4b69-b7c8-89492c0ae045-config-data" (OuterVolumeSpecName: "config-data") pod "77344f11-ce8b-4b69-b7c8-89492c0ae045" (UID: "77344f11-ce8b-4b69-b7c8-89492c0ae045"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.391443 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/091d90c2-7a16-4474-8f4e-981342297fac-scripts\") pod \"091d90c2-7a16-4474-8f4e-981342297fac\" (UID: \"091d90c2-7a16-4474-8f4e-981342297fac\") " Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.391543 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/091d90c2-7a16-4474-8f4e-981342297fac-certs\") pod \"091d90c2-7a16-4474-8f4e-981342297fac\" (UID: \"091d90c2-7a16-4474-8f4e-981342297fac\") " Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.391616 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/091d90c2-7a16-4474-8f4e-981342297fac-config-data\") pod \"091d90c2-7a16-4474-8f4e-981342297fac\" (UID: \"091d90c2-7a16-4474-8f4e-981342297fac\") " Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.391650 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k58zv\" (UniqueName: \"kubernetes.io/projected/091d90c2-7a16-4474-8f4e-981342297fac-kube-api-access-k58zv\") pod \"091d90c2-7a16-4474-8f4e-981342297fac\" (UID: \"091d90c2-7a16-4474-8f4e-981342297fac\") " Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.391712 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/091d90c2-7a16-4474-8f4e-981342297fac-combined-ca-bundle\") pod \"091d90c2-7a16-4474-8f4e-981342297fac\" (UID: \"091d90c2-7a16-4474-8f4e-981342297fac\") " Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 
18:59:09.392073 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77344f11-ce8b-4b69-b7c8-89492c0ae045-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.392092 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77344f11-ce8b-4b69-b7c8-89492c0ae045-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.399880 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/091d90c2-7a16-4474-8f4e-981342297fac-scripts" (OuterVolumeSpecName: "scripts") pod "091d90c2-7a16-4474-8f4e-981342297fac" (UID: "091d90c2-7a16-4474-8f4e-981342297fac"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.400103 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/091d90c2-7a16-4474-8f4e-981342297fac-certs" (OuterVolumeSpecName: "certs") pod "091d90c2-7a16-4474-8f4e-981342297fac" (UID: "091d90c2-7a16-4474-8f4e-981342297fac"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.422722 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/091d90c2-7a16-4474-8f4e-981342297fac-kube-api-access-k58zv" (OuterVolumeSpecName: "kube-api-access-k58zv") pod "091d90c2-7a16-4474-8f4e-981342297fac" (UID: "091d90c2-7a16-4474-8f4e-981342297fac"). InnerVolumeSpecName "kube-api-access-k58zv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.447505 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/091d90c2-7a16-4474-8f4e-981342297fac-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "091d90c2-7a16-4474-8f4e-981342297fac" (UID: "091d90c2-7a16-4474-8f4e-981342297fac"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.450806 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/091d90c2-7a16-4474-8f4e-981342297fac-config-data" (OuterVolumeSpecName: "config-data") pod "091d90c2-7a16-4474-8f4e-981342297fac" (UID: "091d90c2-7a16-4474-8f4e-981342297fac"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.494209 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/091d90c2-7a16-4474-8f4e-981342297fac-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.494246 4792 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/091d90c2-7a16-4474-8f4e-981342297fac-certs\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.494256 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/091d90c2-7a16-4474-8f4e-981342297fac-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.494288 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k58zv\" (UniqueName: \"kubernetes.io/projected/091d90c2-7a16-4474-8f4e-981342297fac-kube-api-access-k58zv\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.494300 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/091d90c2-7a16-4474-8f4e-981342297fac-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.553402 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c82ec48d-c75a-4989-8f7d-465a3ce22987" path="/var/lib/kubelet/pods/c82ec48d-c75a-4989-8f7d-465a3ce22987/volumes" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.693908 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6f7549b9fd-4lplx" event={"ID":"d7d46db5-e66e-435c-a7dd-c5f8fef782ed","Type":"ContainerDied","Data":"b818e53efb9db2b2a04e812768015efad9f577bfa1308cc0d8410299c389fc3a"} Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.693980 4792 scope.go:117] "RemoveContainer" containerID="577fb68ddf3e7f340fcdfa2a0caffec048e6c09a356e2dbb1a5cd4c3ea1ab179" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.693924 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6f7549b9fd-4lplx" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.698495 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-sj984" event={"ID":"091d90c2-7a16-4474-8f4e-981342297fac","Type":"ContainerDied","Data":"7875fe5efec8cd5ac2c4e38aba12bcd76a5f2d5af007c816bb7bafcf7b29395f"} Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.698544 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7875fe5efec8cd5ac2c4e38aba12bcd76a5f2d5af007c816bb7bafcf7b29395f" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.698583 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-storageinit-sj984" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.705630 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.705633 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"77344f11-ce8b-4b69-b7c8-89492c0ae045","Type":"ContainerDied","Data":"4497588b5b53893e07250547f5a77a4562a893575def622e4faa01a5afd32538"} Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.734572 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-6f7549b9fd-4lplx"] Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.734705 4792 scope.go:117] "RemoveContainer" containerID="1bf757f8b2bb9bf3c37111cdab1a1636c3770eb057a7b7552da4b2de241868df" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.740663 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-6f7549b9fd-4lplx"] Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.764600 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.783150 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.818603 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 02 18:59:09 crc kubenswrapper[4792]: E1202 18:59:09.819086 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c82ec48d-c75a-4989-8f7d-465a3ce22987" containerName="init" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.819098 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="c82ec48d-c75a-4989-8f7d-465a3ce22987" containerName="init" Dec 02 18:59:09 crc kubenswrapper[4792]: E1202 18:59:09.819113 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77344f11-ce8b-4b69-b7c8-89492c0ae045" containerName="probe" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.819119 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="77344f11-ce8b-4b69-b7c8-89492c0ae045" containerName="probe" Dec 02 18:59:09 crc kubenswrapper[4792]: E1202 18:59:09.819135 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77344f11-ce8b-4b69-b7c8-89492c0ae045" containerName="cinder-scheduler" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.819141 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="77344f11-ce8b-4b69-b7c8-89492c0ae045" containerName="cinder-scheduler" Dec 02 18:59:09 crc kubenswrapper[4792]: E1202 18:59:09.819153 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7d46db5-e66e-435c-a7dd-c5f8fef782ed" containerName="barbican-api" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.819159 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7d46db5-e66e-435c-a7dd-c5f8fef782ed" containerName="barbican-api" Dec 02 18:59:09 crc kubenswrapper[4792]: E1202 18:59:09.819171 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="091d90c2-7a16-4474-8f4e-981342297fac" containerName="cloudkitty-storageinit" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.819177 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="091d90c2-7a16-4474-8f4e-981342297fac" containerName="cloudkitty-storageinit" Dec 02 18:59:09 crc kubenswrapper[4792]: E1202 18:59:09.819184 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7d46db5-e66e-435c-a7dd-c5f8fef782ed" containerName="barbican-api-log" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.819190 
4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7d46db5-e66e-435c-a7dd-c5f8fef782ed" containerName="barbican-api-log" Dec 02 18:59:09 crc kubenswrapper[4792]: E1202 18:59:09.819205 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e8170b3-9772-49c8-af59-87f59120f79e" containerName="neutron-httpd" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.819211 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e8170b3-9772-49c8-af59-87f59120f79e" containerName="neutron-httpd" Dec 02 18:59:09 crc kubenswrapper[4792]: E1202 18:59:09.819226 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c82ec48d-c75a-4989-8f7d-465a3ce22987" containerName="dnsmasq-dns" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.819232 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="c82ec48d-c75a-4989-8f7d-465a3ce22987" containerName="dnsmasq-dns" Dec 02 18:59:09 crc kubenswrapper[4792]: E1202 18:59:09.819251 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e8170b3-9772-49c8-af59-87f59120f79e" containerName="neutron-api" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.819257 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e8170b3-9772-49c8-af59-87f59120f79e" containerName="neutron-api" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.819423 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="77344f11-ce8b-4b69-b7c8-89492c0ae045" containerName="probe" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.819432 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e8170b3-9772-49c8-af59-87f59120f79e" containerName="neutron-api" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.819445 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7d46db5-e66e-435c-a7dd-c5f8fef782ed" containerName="barbican-api-log" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.819457 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="091d90c2-7a16-4474-8f4e-981342297fac" containerName="cloudkitty-storageinit" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.819464 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e8170b3-9772-49c8-af59-87f59120f79e" containerName="neutron-httpd" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.819472 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="c82ec48d-c75a-4989-8f7d-465a3ce22987" containerName="dnsmasq-dns" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.819489 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7d46db5-e66e-435c-a7dd-c5f8fef782ed" containerName="barbican-api" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.819503 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="77344f11-ce8b-4b69-b7c8-89492c0ae045" containerName="cinder-scheduler" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.820553 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.826635 4792 scope.go:117] "RemoveContainer" containerID="b9916f2a5501389ec630d168caa306ff10053da7b7757b3119501cd7f351177c" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.827304 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.832153 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.855503 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.876891 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-proc-0" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.882094 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-cloudkitty-dockercfg-qndk8" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.882271 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-scripts" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.882393 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-proc-config-data" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.882500 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-client-internal" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.882624 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-config-data" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.883059 4792 scope.go:117] "RemoveContainer" containerID="ca783563b855034fd155cc28b1e580cd02169610f8b95c508cc9d26cbd6ccd83" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.892904 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.913117 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4e9df765-ce65-43eb-bdcc-344fb7f68889-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"4e9df765-ce65-43eb-bdcc-344fb7f68889\") " pod="openstack/cinder-scheduler-0" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.913156 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4e9df765-ce65-43eb-bdcc-344fb7f68889-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"4e9df765-ce65-43eb-bdcc-344fb7f68889\") " pod="openstack/cinder-scheduler-0" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.913182 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kdgpw\" (UniqueName: \"kubernetes.io/projected/63ce0ba8-1889-450a-a6a2-c55bba3e5dc4-kube-api-access-kdgpw\") pod \"cloudkitty-proc-0\" (UID: \"63ce0ba8-1889-450a-a6a2-c55bba3e5dc4\") " pod="openstack/cloudkitty-proc-0" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.913202 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63ce0ba8-1889-450a-a6a2-c55bba3e5dc4-config-data\") pod \"cloudkitty-proc-0\" 
(UID: \"63ce0ba8-1889-450a-a6a2-c55bba3e5dc4\") " pod="openstack/cloudkitty-proc-0" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.913219 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63ce0ba8-1889-450a-a6a2-c55bba3e5dc4-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"63ce0ba8-1889-450a-a6a2-c55bba3e5dc4\") " pod="openstack/cloudkitty-proc-0" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.913246 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e9df765-ce65-43eb-bdcc-344fb7f68889-config-data\") pod \"cinder-scheduler-0\" (UID: \"4e9df765-ce65-43eb-bdcc-344fb7f68889\") " pod="openstack/cinder-scheduler-0" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.913290 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/63ce0ba8-1889-450a-a6a2-c55bba3e5dc4-scripts\") pod \"cloudkitty-proc-0\" (UID: \"63ce0ba8-1889-450a-a6a2-c55bba3e5dc4\") " pod="openstack/cloudkitty-proc-0" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.913338 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/63ce0ba8-1889-450a-a6a2-c55bba3e5dc4-certs\") pod \"cloudkitty-proc-0\" (UID: \"63ce0ba8-1889-450a-a6a2-c55bba3e5dc4\") " pod="openstack/cloudkitty-proc-0" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.913355 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fv8vz\" (UniqueName: \"kubernetes.io/projected/4e9df765-ce65-43eb-bdcc-344fb7f68889-kube-api-access-fv8vz\") pod \"cinder-scheduler-0\" (UID: \"4e9df765-ce65-43eb-bdcc-344fb7f68889\") " pod="openstack/cinder-scheduler-0" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.913414 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e9df765-ce65-43eb-bdcc-344fb7f68889-scripts\") pod \"cinder-scheduler-0\" (UID: \"4e9df765-ce65-43eb-bdcc-344fb7f68889\") " pod="openstack/cinder-scheduler-0" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.913429 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/63ce0ba8-1889-450a-a6a2-c55bba3e5dc4-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"63ce0ba8-1889-450a-a6a2-c55bba3e5dc4\") " pod="openstack/cloudkitty-proc-0" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.913453 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e9df765-ce65-43eb-bdcc-344fb7f68889-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"4e9df765-ce65-43eb-bdcc-344fb7f68889\") " pod="openstack/cinder-scheduler-0" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.971836 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-67bdc55879-c9bv9"] Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.973598 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67bdc55879-c9bv9" Dec 02 18:59:09 crc kubenswrapper[4792]: I1202 18:59:09.979706 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67bdc55879-c9bv9"] Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.015394 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4e9df765-ce65-43eb-bdcc-344fb7f68889-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"4e9df765-ce65-43eb-bdcc-344fb7f68889\") " pod="openstack/cinder-scheduler-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.015437 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4e9df765-ce65-43eb-bdcc-344fb7f68889-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"4e9df765-ce65-43eb-bdcc-344fb7f68889\") " pod="openstack/cinder-scheduler-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.015464 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kdgpw\" (UniqueName: \"kubernetes.io/projected/63ce0ba8-1889-450a-a6a2-c55bba3e5dc4-kube-api-access-kdgpw\") pod \"cloudkitty-proc-0\" (UID: \"63ce0ba8-1889-450a-a6a2-c55bba3e5dc4\") " pod="openstack/cloudkitty-proc-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.015485 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f05e35a9-ee56-406e-a58b-ec43e8c76dcf-ovsdbserver-nb\") pod \"dnsmasq-dns-67bdc55879-c9bv9\" (UID: \"f05e35a9-ee56-406e-a58b-ec43e8c76dcf\") " pod="openstack/dnsmasq-dns-67bdc55879-c9bv9" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.015506 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63ce0ba8-1889-450a-a6a2-c55bba3e5dc4-config-data\") pod \"cloudkitty-proc-0\" (UID: \"63ce0ba8-1889-450a-a6a2-c55bba3e5dc4\") " pod="openstack/cloudkitty-proc-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.015535 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63ce0ba8-1889-450a-a6a2-c55bba3e5dc4-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"63ce0ba8-1889-450a-a6a2-c55bba3e5dc4\") " pod="openstack/cloudkitty-proc-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.015560 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f05e35a9-ee56-406e-a58b-ec43e8c76dcf-dns-svc\") pod \"dnsmasq-dns-67bdc55879-c9bv9\" (UID: \"f05e35a9-ee56-406e-a58b-ec43e8c76dcf\") " pod="openstack/dnsmasq-dns-67bdc55879-c9bv9" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.015578 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e9df765-ce65-43eb-bdcc-344fb7f68889-config-data\") pod \"cinder-scheduler-0\" (UID: \"4e9df765-ce65-43eb-bdcc-344fb7f68889\") " pod="openstack/cinder-scheduler-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.015596 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f05e35a9-ee56-406e-a58b-ec43e8c76dcf-config\") pod \"dnsmasq-dns-67bdc55879-c9bv9\" (UID: 
\"f05e35a9-ee56-406e-a58b-ec43e8c76dcf\") " pod="openstack/dnsmasq-dns-67bdc55879-c9bv9" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.015632 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/63ce0ba8-1889-450a-a6a2-c55bba3e5dc4-scripts\") pod \"cloudkitty-proc-0\" (UID: \"63ce0ba8-1889-450a-a6a2-c55bba3e5dc4\") " pod="openstack/cloudkitty-proc-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.015673 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/63ce0ba8-1889-450a-a6a2-c55bba3e5dc4-certs\") pod \"cloudkitty-proc-0\" (UID: \"63ce0ba8-1889-450a-a6a2-c55bba3e5dc4\") " pod="openstack/cloudkitty-proc-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.015691 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fv8vz\" (UniqueName: \"kubernetes.io/projected/4e9df765-ce65-43eb-bdcc-344fb7f68889-kube-api-access-fv8vz\") pod \"cinder-scheduler-0\" (UID: \"4e9df765-ce65-43eb-bdcc-344fb7f68889\") " pod="openstack/cinder-scheduler-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.015707 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f05e35a9-ee56-406e-a58b-ec43e8c76dcf-dns-swift-storage-0\") pod \"dnsmasq-dns-67bdc55879-c9bv9\" (UID: \"f05e35a9-ee56-406e-a58b-ec43e8c76dcf\") " pod="openstack/dnsmasq-dns-67bdc55879-c9bv9" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.015733 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f05e35a9-ee56-406e-a58b-ec43e8c76dcf-ovsdbserver-sb\") pod \"dnsmasq-dns-67bdc55879-c9bv9\" (UID: \"f05e35a9-ee56-406e-a58b-ec43e8c76dcf\") " pod="openstack/dnsmasq-dns-67bdc55879-c9bv9" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.015775 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e9df765-ce65-43eb-bdcc-344fb7f68889-scripts\") pod \"cinder-scheduler-0\" (UID: \"4e9df765-ce65-43eb-bdcc-344fb7f68889\") " pod="openstack/cinder-scheduler-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.015793 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/63ce0ba8-1889-450a-a6a2-c55bba3e5dc4-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"63ce0ba8-1889-450a-a6a2-c55bba3e5dc4\") " pod="openstack/cloudkitty-proc-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.015810 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ckb6\" (UniqueName: \"kubernetes.io/projected/f05e35a9-ee56-406e-a58b-ec43e8c76dcf-kube-api-access-5ckb6\") pod \"dnsmasq-dns-67bdc55879-c9bv9\" (UID: \"f05e35a9-ee56-406e-a58b-ec43e8c76dcf\") " pod="openstack/dnsmasq-dns-67bdc55879-c9bv9" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.015830 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e9df765-ce65-43eb-bdcc-344fb7f68889-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"4e9df765-ce65-43eb-bdcc-344fb7f68889\") " pod="openstack/cinder-scheduler-0" Dec 02 18:59:10 crc 
kubenswrapper[4792]: I1202 18:59:10.017850 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4e9df765-ce65-43eb-bdcc-344fb7f68889-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"4e9df765-ce65-43eb-bdcc-344fb7f68889\") " pod="openstack/cinder-scheduler-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.024739 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/63ce0ba8-1889-450a-a6a2-c55bba3e5dc4-scripts\") pod \"cloudkitty-proc-0\" (UID: \"63ce0ba8-1889-450a-a6a2-c55bba3e5dc4\") " pod="openstack/cloudkitty-proc-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.045143 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e9df765-ce65-43eb-bdcc-344fb7f68889-scripts\") pod \"cinder-scheduler-0\" (UID: \"4e9df765-ce65-43eb-bdcc-344fb7f68889\") " pod="openstack/cinder-scheduler-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.045286 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/63ce0ba8-1889-450a-a6a2-c55bba3e5dc4-certs\") pod \"cloudkitty-proc-0\" (UID: \"63ce0ba8-1889-450a-a6a2-c55bba3e5dc4\") " pod="openstack/cloudkitty-proc-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.047135 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e9df765-ce65-43eb-bdcc-344fb7f68889-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"4e9df765-ce65-43eb-bdcc-344fb7f68889\") " pod="openstack/cinder-scheduler-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.049198 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-api-0"] Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.049840 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kdgpw\" (UniqueName: \"kubernetes.io/projected/63ce0ba8-1889-450a-a6a2-c55bba3e5dc4-kube-api-access-kdgpw\") pod \"cloudkitty-proc-0\" (UID: \"63ce0ba8-1889-450a-a6a2-c55bba3e5dc4\") " pod="openstack/cloudkitty-proc-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.050304 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/63ce0ba8-1889-450a-a6a2-c55bba3e5dc4-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"63ce0ba8-1889-450a-a6a2-c55bba3e5dc4\") " pod="openstack/cloudkitty-proc-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.050488 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e9df765-ce65-43eb-bdcc-344fb7f68889-config-data\") pod \"cinder-scheduler-0\" (UID: \"4e9df765-ce65-43eb-bdcc-344fb7f68889\") " pod="openstack/cinder-scheduler-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.050660 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63ce0ba8-1889-450a-a6a2-c55bba3e5dc4-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"63ce0ba8-1889-450a-a6a2-c55bba3e5dc4\") " pod="openstack/cloudkitty-proc-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.051314 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-api-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.054546 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63ce0ba8-1889-450a-a6a2-c55bba3e5dc4-config-data\") pod \"cloudkitty-proc-0\" (UID: \"63ce0ba8-1889-450a-a6a2-c55bba3e5dc4\") " pod="openstack/cloudkitty-proc-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.054978 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-api-config-data" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.055002 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4e9df765-ce65-43eb-bdcc-344fb7f68889-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"4e9df765-ce65-43eb-bdcc-344fb7f68889\") " pod="openstack/cinder-scheduler-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.056767 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fv8vz\" (UniqueName: \"kubernetes.io/projected/4e9df765-ce65-43eb-bdcc-344fb7f68889-kube-api-access-fv8vz\") pod \"cinder-scheduler-0\" (UID: \"4e9df765-ce65-43eb-bdcc-344fb7f68889\") " pod="openstack/cinder-scheduler-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.067002 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.117223 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f05e35a9-ee56-406e-a58b-ec43e8c76dcf-ovsdbserver-sb\") pod \"dnsmasq-dns-67bdc55879-c9bv9\" (UID: \"f05e35a9-ee56-406e-a58b-ec43e8c76dcf\") " pod="openstack/dnsmasq-dns-67bdc55879-c9bv9" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.117300 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/876ebd0d-b0ea-4897-a4eb-d8b3acaf592a-scripts\") pod \"cloudkitty-api-0\" (UID: \"876ebd0d-b0ea-4897-a4eb-d8b3acaf592a\") " pod="openstack/cloudkitty-api-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.117358 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ckb6\" (UniqueName: \"kubernetes.io/projected/f05e35a9-ee56-406e-a58b-ec43e8c76dcf-kube-api-access-5ckb6\") pod \"dnsmasq-dns-67bdc55879-c9bv9\" (UID: \"f05e35a9-ee56-406e-a58b-ec43e8c76dcf\") " pod="openstack/dnsmasq-dns-67bdc55879-c9bv9" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.117423 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/876ebd0d-b0ea-4897-a4eb-d8b3acaf592a-logs\") pod \"cloudkitty-api-0\" (UID: \"876ebd0d-b0ea-4897-a4eb-d8b3acaf592a\") " pod="openstack/cloudkitty-api-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.117448 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mtsmq\" (UniqueName: \"kubernetes.io/projected/876ebd0d-b0ea-4897-a4eb-d8b3acaf592a-kube-api-access-mtsmq\") pod \"cloudkitty-api-0\" (UID: \"876ebd0d-b0ea-4897-a4eb-d8b3acaf592a\") " pod="openstack/cloudkitty-api-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.117487 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f05e35a9-ee56-406e-a58b-ec43e8c76dcf-ovsdbserver-nb\") pod \"dnsmasq-dns-67bdc55879-c9bv9\" (UID: \"f05e35a9-ee56-406e-a58b-ec43e8c76dcf\") " pod="openstack/dnsmasq-dns-67bdc55879-c9bv9" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.117505 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/876ebd0d-b0ea-4897-a4eb-d8b3acaf592a-config-data\") pod \"cloudkitty-api-0\" (UID: \"876ebd0d-b0ea-4897-a4eb-d8b3acaf592a\") " pod="openstack/cloudkitty-api-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.117568 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f05e35a9-ee56-406e-a58b-ec43e8c76dcf-dns-svc\") pod \"dnsmasq-dns-67bdc55879-c9bv9\" (UID: \"f05e35a9-ee56-406e-a58b-ec43e8c76dcf\") " pod="openstack/dnsmasq-dns-67bdc55879-c9bv9" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.117588 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/876ebd0d-b0ea-4897-a4eb-d8b3acaf592a-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"876ebd0d-b0ea-4897-a4eb-d8b3acaf592a\") " pod="openstack/cloudkitty-api-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.117624 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f05e35a9-ee56-406e-a58b-ec43e8c76dcf-config\") pod \"dnsmasq-dns-67bdc55879-c9bv9\" (UID: \"f05e35a9-ee56-406e-a58b-ec43e8c76dcf\") " pod="openstack/dnsmasq-dns-67bdc55879-c9bv9" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.117652 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/876ebd0d-b0ea-4897-a4eb-d8b3acaf592a-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"876ebd0d-b0ea-4897-a4eb-d8b3acaf592a\") " pod="openstack/cloudkitty-api-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.117713 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/876ebd0d-b0ea-4897-a4eb-d8b3acaf592a-certs\") pod \"cloudkitty-api-0\" (UID: \"876ebd0d-b0ea-4897-a4eb-d8b3acaf592a\") " pod="openstack/cloudkitty-api-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.117751 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f05e35a9-ee56-406e-a58b-ec43e8c76dcf-dns-swift-storage-0\") pod \"dnsmasq-dns-67bdc55879-c9bv9\" (UID: \"f05e35a9-ee56-406e-a58b-ec43e8c76dcf\") " pod="openstack/dnsmasq-dns-67bdc55879-c9bv9" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.118603 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f05e35a9-ee56-406e-a58b-ec43e8c76dcf-dns-swift-storage-0\") pod \"dnsmasq-dns-67bdc55879-c9bv9\" (UID: \"f05e35a9-ee56-406e-a58b-ec43e8c76dcf\") " pod="openstack/dnsmasq-dns-67bdc55879-c9bv9" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.119070 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f05e35a9-ee56-406e-a58b-ec43e8c76dcf-ovsdbserver-nb\") pod 
\"dnsmasq-dns-67bdc55879-c9bv9\" (UID: \"f05e35a9-ee56-406e-a58b-ec43e8c76dcf\") " pod="openstack/dnsmasq-dns-67bdc55879-c9bv9" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.119263 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f05e35a9-ee56-406e-a58b-ec43e8c76dcf-dns-svc\") pod \"dnsmasq-dns-67bdc55879-c9bv9\" (UID: \"f05e35a9-ee56-406e-a58b-ec43e8c76dcf\") " pod="openstack/dnsmasq-dns-67bdc55879-c9bv9" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.119691 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f05e35a9-ee56-406e-a58b-ec43e8c76dcf-ovsdbserver-sb\") pod \"dnsmasq-dns-67bdc55879-c9bv9\" (UID: \"f05e35a9-ee56-406e-a58b-ec43e8c76dcf\") " pod="openstack/dnsmasq-dns-67bdc55879-c9bv9" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.121212 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f05e35a9-ee56-406e-a58b-ec43e8c76dcf-config\") pod \"dnsmasq-dns-67bdc55879-c9bv9\" (UID: \"f05e35a9-ee56-406e-a58b-ec43e8c76dcf\") " pod="openstack/dnsmasq-dns-67bdc55879-c9bv9" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.140941 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ckb6\" (UniqueName: \"kubernetes.io/projected/f05e35a9-ee56-406e-a58b-ec43e8c76dcf-kube-api-access-5ckb6\") pod \"dnsmasq-dns-67bdc55879-c9bv9\" (UID: \"f05e35a9-ee56-406e-a58b-ec43e8c76dcf\") " pod="openstack/dnsmasq-dns-67bdc55879-c9bv9" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.145494 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.219027 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/876ebd0d-b0ea-4897-a4eb-d8b3acaf592a-scripts\") pod \"cloudkitty-api-0\" (UID: \"876ebd0d-b0ea-4897-a4eb-d8b3acaf592a\") " pod="openstack/cloudkitty-api-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.219114 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/876ebd0d-b0ea-4897-a4eb-d8b3acaf592a-logs\") pod \"cloudkitty-api-0\" (UID: \"876ebd0d-b0ea-4897-a4eb-d8b3acaf592a\") " pod="openstack/cloudkitty-api-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.219140 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mtsmq\" (UniqueName: \"kubernetes.io/projected/876ebd0d-b0ea-4897-a4eb-d8b3acaf592a-kube-api-access-mtsmq\") pod \"cloudkitty-api-0\" (UID: \"876ebd0d-b0ea-4897-a4eb-d8b3acaf592a\") " pod="openstack/cloudkitty-api-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.219170 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/876ebd0d-b0ea-4897-a4eb-d8b3acaf592a-config-data\") pod \"cloudkitty-api-0\" (UID: \"876ebd0d-b0ea-4897-a4eb-d8b3acaf592a\") " pod="openstack/cloudkitty-api-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.219203 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/876ebd0d-b0ea-4897-a4eb-d8b3acaf592a-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: 
\"876ebd0d-b0ea-4897-a4eb-d8b3acaf592a\") " pod="openstack/cloudkitty-api-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.219231 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/876ebd0d-b0ea-4897-a4eb-d8b3acaf592a-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"876ebd0d-b0ea-4897-a4eb-d8b3acaf592a\") " pod="openstack/cloudkitty-api-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.219276 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/876ebd0d-b0ea-4897-a4eb-d8b3acaf592a-certs\") pod \"cloudkitty-api-0\" (UID: \"876ebd0d-b0ea-4897-a4eb-d8b3acaf592a\") " pod="openstack/cloudkitty-api-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.219701 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/876ebd0d-b0ea-4897-a4eb-d8b3acaf592a-logs\") pod \"cloudkitty-api-0\" (UID: \"876ebd0d-b0ea-4897-a4eb-d8b3acaf592a\") " pod="openstack/cloudkitty-api-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.222516 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/876ebd0d-b0ea-4897-a4eb-d8b3acaf592a-certs\") pod \"cloudkitty-api-0\" (UID: \"876ebd0d-b0ea-4897-a4eb-d8b3acaf592a\") " pod="openstack/cloudkitty-api-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.223670 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/876ebd0d-b0ea-4897-a4eb-d8b3acaf592a-scripts\") pod \"cloudkitty-api-0\" (UID: \"876ebd0d-b0ea-4897-a4eb-d8b3acaf592a\") " pod="openstack/cloudkitty-api-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.225960 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/876ebd0d-b0ea-4897-a4eb-d8b3acaf592a-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"876ebd0d-b0ea-4897-a4eb-d8b3acaf592a\") " pod="openstack/cloudkitty-api-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.226746 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/876ebd0d-b0ea-4897-a4eb-d8b3acaf592a-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"876ebd0d-b0ea-4897-a4eb-d8b3acaf592a\") " pod="openstack/cloudkitty-api-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.231913 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-proc-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.233339 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/876ebd0d-b0ea-4897-a4eb-d8b3acaf592a-config-data\") pod \"cloudkitty-api-0\" (UID: \"876ebd0d-b0ea-4897-a4eb-d8b3acaf592a\") " pod="openstack/cloudkitty-api-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.241645 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mtsmq\" (UniqueName: \"kubernetes.io/projected/876ebd0d-b0ea-4897-a4eb-d8b3acaf592a-kube-api-access-mtsmq\") pod \"cloudkitty-api-0\" (UID: \"876ebd0d-b0ea-4897-a4eb-d8b3acaf592a\") " pod="openstack/cloudkitty-api-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.306045 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67bdc55879-c9bv9" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.480579 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0" Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.727099 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 02 18:59:10 crc kubenswrapper[4792]: W1202 18:59:10.823546 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod63ce0ba8_1889_450a_a6a2_c55bba3e5dc4.slice/crio-f676bb1fc7746f9168749d10f30acada14baeb9f5bda53d3bb16949619c6d0be WatchSource:0}: Error finding container f676bb1fc7746f9168749d10f30acada14baeb9f5bda53d3bb16949619c6d0be: Status 404 returned error can't find the container with id f676bb1fc7746f9168749d10f30acada14baeb9f5bda53d3bb16949619c6d0be Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.826760 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 02 18:59:10 crc kubenswrapper[4792]: W1202 18:59:10.859367 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod876ebd0d_b0ea_4897_a4eb_d8b3acaf592a.slice/crio-9a2253f2590baeaf21f6f2336ec3d978a220879887e418dfec0a6df7df7de5bd WatchSource:0}: Error finding container 9a2253f2590baeaf21f6f2336ec3d978a220879887e418dfec0a6df7df7de5bd: Status 404 returned error can't find the container with id 9a2253f2590baeaf21f6f2336ec3d978a220879887e418dfec0a6df7df7de5bd Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.864648 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 02 18:59:10 crc kubenswrapper[4792]: I1202 18:59:10.908072 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67bdc55879-c9bv9"] Dec 02 18:59:10 crc kubenswrapper[4792]: W1202 18:59:10.910500 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf05e35a9_ee56_406e_a58b_ec43e8c76dcf.slice/crio-be005de97aed1a4b16838470ee4b48748e3b693d6d50f92d1237ba111e4b85d8 WatchSource:0}: Error finding container be005de97aed1a4b16838470ee4b48748e3b693d6d50f92d1237ba111e4b85d8: Status 404 returned error can't find the container with id be005de97aed1a4b16838470ee4b48748e3b693d6d50f92d1237ba111e4b85d8 Dec 02 18:59:11 crc kubenswrapper[4792]: I1202 18:59:11.561858 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="77344f11-ce8b-4b69-b7c8-89492c0ae045" path="/var/lib/kubelet/pods/77344f11-ce8b-4b69-b7c8-89492c0ae045/volumes" Dec 02 18:59:11 crc kubenswrapper[4792]: I1202 18:59:11.562984 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d7d46db5-e66e-435c-a7dd-c5f8fef782ed" path="/var/lib/kubelet/pods/d7d46db5-e66e-435c-a7dd-c5f8fef782ed/volumes" Dec 02 18:59:11 crc kubenswrapper[4792]: I1202 18:59:11.763563 4792 generic.go:334] "Generic (PLEG): container finished" podID="f05e35a9-ee56-406e-a58b-ec43e8c76dcf" containerID="99c7a90d2b43aa129dfd5528603d5619a7fe1345a284c0a1747fb00543be5ada" exitCode=0 Dec 02 18:59:11 crc kubenswrapper[4792]: I1202 18:59:11.763644 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67bdc55879-c9bv9" 
event={"ID":"f05e35a9-ee56-406e-a58b-ec43e8c76dcf","Type":"ContainerDied","Data":"99c7a90d2b43aa129dfd5528603d5619a7fe1345a284c0a1747fb00543be5ada"} Dec 02 18:59:11 crc kubenswrapper[4792]: I1202 18:59:11.763677 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67bdc55879-c9bv9" event={"ID":"f05e35a9-ee56-406e-a58b-ec43e8c76dcf","Type":"ContainerStarted","Data":"be005de97aed1a4b16838470ee4b48748e3b693d6d50f92d1237ba111e4b85d8"} Dec 02 18:59:11 crc kubenswrapper[4792]: I1202 18:59:11.767389 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"876ebd0d-b0ea-4897-a4eb-d8b3acaf592a","Type":"ContainerStarted","Data":"682734625fa52274d0b75bf5584cb63706938b6c4935098ec8ff1ba8d2d52e96"} Dec 02 18:59:11 crc kubenswrapper[4792]: I1202 18:59:11.767435 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"876ebd0d-b0ea-4897-a4eb-d8b3acaf592a","Type":"ContainerStarted","Data":"86edf77b1a085fdd7014a7b6c2879d1a0544bf1c00307bdb11a799e00fee2907"} Dec 02 18:59:11 crc kubenswrapper[4792]: I1202 18:59:11.767451 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"876ebd0d-b0ea-4897-a4eb-d8b3acaf592a","Type":"ContainerStarted","Data":"9a2253f2590baeaf21f6f2336ec3d978a220879887e418dfec0a6df7df7de5bd"} Dec 02 18:59:11 crc kubenswrapper[4792]: I1202 18:59:11.768423 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-api-0" Dec 02 18:59:11 crc kubenswrapper[4792]: I1202 18:59:11.770155 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"4e9df765-ce65-43eb-bdcc-344fb7f68889","Type":"ContainerStarted","Data":"da23994f034a375074ebfe9833b7ef3cb84ac3da49e24534bae1359036655b03"} Dec 02 18:59:11 crc kubenswrapper[4792]: I1202 18:59:11.770194 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"4e9df765-ce65-43eb-bdcc-344fb7f68889","Type":"ContainerStarted","Data":"1b3c22849ec63b9d8757e4e5d0b81692d20c0dbd4136b9b367a98af5edbd59ec"} Dec 02 18:59:11 crc kubenswrapper[4792]: I1202 18:59:11.771013 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"63ce0ba8-1889-450a-a6a2-c55bba3e5dc4","Type":"ContainerStarted","Data":"f676bb1fc7746f9168749d10f30acada14baeb9f5bda53d3bb16949619c6d0be"} Dec 02 18:59:11 crc kubenswrapper[4792]: I1202 18:59:11.808597 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-api-0" podStartSLOduration=2.808580334 podStartE2EDuration="2.808580334s" podCreationTimestamp="2025-12-02 18:59:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:59:11.805877574 +0000 UTC m=+1382.578769902" watchObservedRunningTime="2025-12-02 18:59:11.808580334 +0000 UTC m=+1382.581472662" Dec 02 18:59:12 crc kubenswrapper[4792]: I1202 18:59:12.782142 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"4e9df765-ce65-43eb-bdcc-344fb7f68889","Type":"ContainerStarted","Data":"3c8fd895b5e6bca3498ead49ea0365e9aa5be788c757d70e9a39a59823f7ecd0"} Dec 02 18:59:12 crc kubenswrapper[4792]: I1202 18:59:12.784073 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" 
event={"ID":"63ce0ba8-1889-450a-a6a2-c55bba3e5dc4","Type":"ContainerStarted","Data":"354f6242c985cf11327b070139ec2b0feac5f5afaac005ad9aee7a2dcfd49850"} Dec 02 18:59:12 crc kubenswrapper[4792]: I1202 18:59:12.785995 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67bdc55879-c9bv9" event={"ID":"f05e35a9-ee56-406e-a58b-ec43e8c76dcf","Type":"ContainerStarted","Data":"1aa41d8bc5c9b7f52d3813de7c4fa792b2a5d3f3ebe25c20e24c2575d692a575"} Dec 02 18:59:12 crc kubenswrapper[4792]: I1202 18:59:12.818018 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 02 18:59:12 crc kubenswrapper[4792]: I1202 18:59:12.869335 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.8693148600000002 podStartE2EDuration="3.86931486s" podCreationTimestamp="2025-12-02 18:59:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:59:12.836794417 +0000 UTC m=+1383.609686745" watchObservedRunningTime="2025-12-02 18:59:12.86931486 +0000 UTC m=+1383.642207188" Dec 02 18:59:12 crc kubenswrapper[4792]: I1202 18:59:12.873783 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-67bdc55879-c9bv9" podStartSLOduration=3.873763265 podStartE2EDuration="3.873763265s" podCreationTimestamp="2025-12-02 18:59:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:59:12.856012635 +0000 UTC m=+1383.628904963" watchObservedRunningTime="2025-12-02 18:59:12.873763265 +0000 UTC m=+1383.646655593" Dec 02 18:59:12 crc kubenswrapper[4792]: I1202 18:59:12.890562 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-proc-0" podStartSLOduration=2.654936153 podStartE2EDuration="3.89054472s" podCreationTimestamp="2025-12-02 18:59:09 +0000 UTC" firstStartedPulling="2025-12-02 18:59:10.826895196 +0000 UTC m=+1381.599787524" lastFinishedPulling="2025-12-02 18:59:12.062503763 +0000 UTC m=+1382.835396091" observedRunningTime="2025-12-02 18:59:12.881940397 +0000 UTC m=+1383.654832725" watchObservedRunningTime="2025-12-02 18:59:12.89054472 +0000 UTC m=+1383.663437048" Dec 02 18:59:12 crc kubenswrapper[4792]: I1202 18:59:12.901166 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 02 18:59:13 crc kubenswrapper[4792]: I1202 18:59:13.794474 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-67bdc55879-c9bv9" Dec 02 18:59:14 crc kubenswrapper[4792]: I1202 18:59:14.801677 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-api-0" podUID="876ebd0d-b0ea-4897-a4eb-d8b3acaf592a" containerName="cloudkitty-api-log" containerID="cri-o://86edf77b1a085fdd7014a7b6c2879d1a0544bf1c00307bdb11a799e00fee2907" gracePeriod=30 Dec 02 18:59:14 crc kubenswrapper[4792]: I1202 18:59:14.803472 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-proc-0" podUID="63ce0ba8-1889-450a-a6a2-c55bba3e5dc4" containerName="cloudkitty-proc" containerID="cri-o://354f6242c985cf11327b070139ec2b0feac5f5afaac005ad9aee7a2dcfd49850" gracePeriod=30 Dec 02 18:59:14 crc kubenswrapper[4792]: I1202 18:59:14.803838 4792 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/cloudkitty-api-0" podUID="876ebd0d-b0ea-4897-a4eb-d8b3acaf592a" containerName="cloudkitty-api" containerID="cri-o://682734625fa52274d0b75bf5584cb63706938b6c4935098ec8ff1ba8d2d52e96" gracePeriod=30 Dec 02 18:59:15 crc kubenswrapper[4792]: I1202 18:59:15.146297 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 02 18:59:15 crc kubenswrapper[4792]: I1202 18:59:15.672588 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0" Dec 02 18:59:15 crc kubenswrapper[4792]: I1202 18:59:15.816686 4792 generic.go:334] "Generic (PLEG): container finished" podID="876ebd0d-b0ea-4897-a4eb-d8b3acaf592a" containerID="682734625fa52274d0b75bf5584cb63706938b6c4935098ec8ff1ba8d2d52e96" exitCode=0 Dec 02 18:59:15 crc kubenswrapper[4792]: I1202 18:59:15.816716 4792 generic.go:334] "Generic (PLEG): container finished" podID="876ebd0d-b0ea-4897-a4eb-d8b3acaf592a" containerID="86edf77b1a085fdd7014a7b6c2879d1a0544bf1c00307bdb11a799e00fee2907" exitCode=143 Dec 02 18:59:15 crc kubenswrapper[4792]: I1202 18:59:15.816760 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"876ebd0d-b0ea-4897-a4eb-d8b3acaf592a","Type":"ContainerDied","Data":"682734625fa52274d0b75bf5584cb63706938b6c4935098ec8ff1ba8d2d52e96"} Dec 02 18:59:15 crc kubenswrapper[4792]: I1202 18:59:15.816795 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0" Dec 02 18:59:15 crc kubenswrapper[4792]: I1202 18:59:15.816812 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"876ebd0d-b0ea-4897-a4eb-d8b3acaf592a","Type":"ContainerDied","Data":"86edf77b1a085fdd7014a7b6c2879d1a0544bf1c00307bdb11a799e00fee2907"} Dec 02 18:59:15 crc kubenswrapper[4792]: I1202 18:59:15.816823 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"876ebd0d-b0ea-4897-a4eb-d8b3acaf592a","Type":"ContainerDied","Data":"9a2253f2590baeaf21f6f2336ec3d978a220879887e418dfec0a6df7df7de5bd"} Dec 02 18:59:15 crc kubenswrapper[4792]: I1202 18:59:15.816839 4792 scope.go:117] "RemoveContainer" containerID="682734625fa52274d0b75bf5584cb63706938b6c4935098ec8ff1ba8d2d52e96" Dec 02 18:59:15 crc kubenswrapper[4792]: I1202 18:59:15.840038 4792 scope.go:117] "RemoveContainer" containerID="86edf77b1a085fdd7014a7b6c2879d1a0544bf1c00307bdb11a799e00fee2907" Dec 02 18:59:15 crc kubenswrapper[4792]: I1202 18:59:15.866301 4792 scope.go:117] "RemoveContainer" containerID="682734625fa52274d0b75bf5584cb63706938b6c4935098ec8ff1ba8d2d52e96" Dec 02 18:59:15 crc kubenswrapper[4792]: E1202 18:59:15.866729 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"682734625fa52274d0b75bf5584cb63706938b6c4935098ec8ff1ba8d2d52e96\": container with ID starting with 682734625fa52274d0b75bf5584cb63706938b6c4935098ec8ff1ba8d2d52e96 not found: ID does not exist" containerID="682734625fa52274d0b75bf5584cb63706938b6c4935098ec8ff1ba8d2d52e96" Dec 02 18:59:15 crc kubenswrapper[4792]: I1202 18:59:15.866765 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"682734625fa52274d0b75bf5584cb63706938b6c4935098ec8ff1ba8d2d52e96"} err="failed to get container status \"682734625fa52274d0b75bf5584cb63706938b6c4935098ec8ff1ba8d2d52e96\": rpc error: code = NotFound desc = could not find container 
\"682734625fa52274d0b75bf5584cb63706938b6c4935098ec8ff1ba8d2d52e96\": container with ID starting with 682734625fa52274d0b75bf5584cb63706938b6c4935098ec8ff1ba8d2d52e96 not found: ID does not exist" Dec 02 18:59:15 crc kubenswrapper[4792]: I1202 18:59:15.866791 4792 scope.go:117] "RemoveContainer" containerID="86edf77b1a085fdd7014a7b6c2879d1a0544bf1c00307bdb11a799e00fee2907" Dec 02 18:59:15 crc kubenswrapper[4792]: E1202 18:59:15.867076 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"86edf77b1a085fdd7014a7b6c2879d1a0544bf1c00307bdb11a799e00fee2907\": container with ID starting with 86edf77b1a085fdd7014a7b6c2879d1a0544bf1c00307bdb11a799e00fee2907 not found: ID does not exist" containerID="86edf77b1a085fdd7014a7b6c2879d1a0544bf1c00307bdb11a799e00fee2907" Dec 02 18:59:15 crc kubenswrapper[4792]: I1202 18:59:15.867113 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86edf77b1a085fdd7014a7b6c2879d1a0544bf1c00307bdb11a799e00fee2907"} err="failed to get container status \"86edf77b1a085fdd7014a7b6c2879d1a0544bf1c00307bdb11a799e00fee2907\": rpc error: code = NotFound desc = could not find container \"86edf77b1a085fdd7014a7b6c2879d1a0544bf1c00307bdb11a799e00fee2907\": container with ID starting with 86edf77b1a085fdd7014a7b6c2879d1a0544bf1c00307bdb11a799e00fee2907 not found: ID does not exist" Dec 02 18:59:15 crc kubenswrapper[4792]: I1202 18:59:15.867135 4792 scope.go:117] "RemoveContainer" containerID="682734625fa52274d0b75bf5584cb63706938b6c4935098ec8ff1ba8d2d52e96" Dec 02 18:59:15 crc kubenswrapper[4792]: I1202 18:59:15.868924 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"682734625fa52274d0b75bf5584cb63706938b6c4935098ec8ff1ba8d2d52e96"} err="failed to get container status \"682734625fa52274d0b75bf5584cb63706938b6c4935098ec8ff1ba8d2d52e96\": rpc error: code = NotFound desc = could not find container \"682734625fa52274d0b75bf5584cb63706938b6c4935098ec8ff1ba8d2d52e96\": container with ID starting with 682734625fa52274d0b75bf5584cb63706938b6c4935098ec8ff1ba8d2d52e96 not found: ID does not exist" Dec 02 18:59:15 crc kubenswrapper[4792]: I1202 18:59:15.868957 4792 scope.go:117] "RemoveContainer" containerID="86edf77b1a085fdd7014a7b6c2879d1a0544bf1c00307bdb11a799e00fee2907" Dec 02 18:59:15 crc kubenswrapper[4792]: I1202 18:59:15.869211 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86edf77b1a085fdd7014a7b6c2879d1a0544bf1c00307bdb11a799e00fee2907"} err="failed to get container status \"86edf77b1a085fdd7014a7b6c2879d1a0544bf1c00307bdb11a799e00fee2907\": rpc error: code = NotFound desc = could not find container \"86edf77b1a085fdd7014a7b6c2879d1a0544bf1c00307bdb11a799e00fee2907\": container with ID starting with 86edf77b1a085fdd7014a7b6c2879d1a0544bf1c00307bdb11a799e00fee2907 not found: ID does not exist" Dec 02 18:59:15 crc kubenswrapper[4792]: I1202 18:59:15.873157 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/876ebd0d-b0ea-4897-a4eb-d8b3acaf592a-scripts\") pod \"876ebd0d-b0ea-4897-a4eb-d8b3acaf592a\" (UID: \"876ebd0d-b0ea-4897-a4eb-d8b3acaf592a\") " Dec 02 18:59:15 crc kubenswrapper[4792]: I1202 18:59:15.873328 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/876ebd0d-b0ea-4897-a4eb-d8b3acaf592a-config-data\") pod \"876ebd0d-b0ea-4897-a4eb-d8b3acaf592a\" (UID: \"876ebd0d-b0ea-4897-a4eb-d8b3acaf592a\") " Dec 02 18:59:15 crc kubenswrapper[4792]: I1202 18:59:15.873428 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/876ebd0d-b0ea-4897-a4eb-d8b3acaf592a-combined-ca-bundle\") pod \"876ebd0d-b0ea-4897-a4eb-d8b3acaf592a\" (UID: \"876ebd0d-b0ea-4897-a4eb-d8b3acaf592a\") " Dec 02 18:59:15 crc kubenswrapper[4792]: I1202 18:59:15.873537 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/876ebd0d-b0ea-4897-a4eb-d8b3acaf592a-config-data-custom\") pod \"876ebd0d-b0ea-4897-a4eb-d8b3acaf592a\" (UID: \"876ebd0d-b0ea-4897-a4eb-d8b3acaf592a\") " Dec 02 18:59:15 crc kubenswrapper[4792]: I1202 18:59:15.873681 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/876ebd0d-b0ea-4897-a4eb-d8b3acaf592a-logs\") pod \"876ebd0d-b0ea-4897-a4eb-d8b3acaf592a\" (UID: \"876ebd0d-b0ea-4897-a4eb-d8b3acaf592a\") " Dec 02 18:59:15 crc kubenswrapper[4792]: I1202 18:59:15.873792 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mtsmq\" (UniqueName: \"kubernetes.io/projected/876ebd0d-b0ea-4897-a4eb-d8b3acaf592a-kube-api-access-mtsmq\") pod \"876ebd0d-b0ea-4897-a4eb-d8b3acaf592a\" (UID: \"876ebd0d-b0ea-4897-a4eb-d8b3acaf592a\") " Dec 02 18:59:15 crc kubenswrapper[4792]: I1202 18:59:15.873953 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/876ebd0d-b0ea-4897-a4eb-d8b3acaf592a-certs\") pod \"876ebd0d-b0ea-4897-a4eb-d8b3acaf592a\" (UID: \"876ebd0d-b0ea-4897-a4eb-d8b3acaf592a\") " Dec 02 18:59:15 crc kubenswrapper[4792]: I1202 18:59:15.879779 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/876ebd0d-b0ea-4897-a4eb-d8b3acaf592a-logs" (OuterVolumeSpecName: "logs") pod "876ebd0d-b0ea-4897-a4eb-d8b3acaf592a" (UID: "876ebd0d-b0ea-4897-a4eb-d8b3acaf592a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:59:15 crc kubenswrapper[4792]: I1202 18:59:15.886380 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/876ebd0d-b0ea-4897-a4eb-d8b3acaf592a-certs" (OuterVolumeSpecName: "certs") pod "876ebd0d-b0ea-4897-a4eb-d8b3acaf592a" (UID: "876ebd0d-b0ea-4897-a4eb-d8b3acaf592a"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:59:15 crc kubenswrapper[4792]: I1202 18:59:15.889669 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/876ebd0d-b0ea-4897-a4eb-d8b3acaf592a-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "876ebd0d-b0ea-4897-a4eb-d8b3acaf592a" (UID: "876ebd0d-b0ea-4897-a4eb-d8b3acaf592a"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:59:15 crc kubenswrapper[4792]: I1202 18:59:15.893979 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/876ebd0d-b0ea-4897-a4eb-d8b3acaf592a-scripts" (OuterVolumeSpecName: "scripts") pod "876ebd0d-b0ea-4897-a4eb-d8b3acaf592a" (UID: "876ebd0d-b0ea-4897-a4eb-d8b3acaf592a"). 
InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:59:15 crc kubenswrapper[4792]: I1202 18:59:15.896667 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/876ebd0d-b0ea-4897-a4eb-d8b3acaf592a-kube-api-access-mtsmq" (OuterVolumeSpecName: "kube-api-access-mtsmq") pod "876ebd0d-b0ea-4897-a4eb-d8b3acaf592a" (UID: "876ebd0d-b0ea-4897-a4eb-d8b3acaf592a"). InnerVolumeSpecName "kube-api-access-mtsmq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:59:15 crc kubenswrapper[4792]: I1202 18:59:15.957626 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/876ebd0d-b0ea-4897-a4eb-d8b3acaf592a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "876ebd0d-b0ea-4897-a4eb-d8b3acaf592a" (UID: "876ebd0d-b0ea-4897-a4eb-d8b3acaf592a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:59:15 crc kubenswrapper[4792]: I1202 18:59:15.977937 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/876ebd0d-b0ea-4897-a4eb-d8b3acaf592a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:15 crc kubenswrapper[4792]: I1202 18:59:15.977967 4792 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/876ebd0d-b0ea-4897-a4eb-d8b3acaf592a-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:15 crc kubenswrapper[4792]: I1202 18:59:15.977978 4792 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/876ebd0d-b0ea-4897-a4eb-d8b3acaf592a-logs\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:15 crc kubenswrapper[4792]: I1202 18:59:15.977988 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mtsmq\" (UniqueName: \"kubernetes.io/projected/876ebd0d-b0ea-4897-a4eb-d8b3acaf592a-kube-api-access-mtsmq\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:15 crc kubenswrapper[4792]: I1202 18:59:15.977996 4792 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/876ebd0d-b0ea-4897-a4eb-d8b3acaf592a-certs\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:15 crc kubenswrapper[4792]: I1202 18:59:15.978004 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/876ebd0d-b0ea-4897-a4eb-d8b3acaf592a-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.010553 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/876ebd0d-b0ea-4897-a4eb-d8b3acaf592a-config-data" (OuterVolumeSpecName: "config-data") pod "876ebd0d-b0ea-4897-a4eb-d8b3acaf592a" (UID: "876ebd0d-b0ea-4897-a4eb-d8b3acaf592a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.078947 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/876ebd0d-b0ea-4897-a4eb-d8b3acaf592a-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.146244 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.154170 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.171350 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-api-0"] Dec 02 18:59:16 crc kubenswrapper[4792]: E1202 18:59:16.171744 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="876ebd0d-b0ea-4897-a4eb-d8b3acaf592a" containerName="cloudkitty-api" Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.171761 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="876ebd0d-b0ea-4897-a4eb-d8b3acaf592a" containerName="cloudkitty-api" Dec 02 18:59:16 crc kubenswrapper[4792]: E1202 18:59:16.171786 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="876ebd0d-b0ea-4897-a4eb-d8b3acaf592a" containerName="cloudkitty-api-log" Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.171793 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="876ebd0d-b0ea-4897-a4eb-d8b3acaf592a" containerName="cloudkitty-api-log" Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.171981 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="876ebd0d-b0ea-4897-a4eb-d8b3acaf592a" containerName="cloudkitty-api-log" Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.172008 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="876ebd0d-b0ea-4897-a4eb-d8b3acaf592a" containerName="cloudkitty-api" Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.173393 4792 util.go:30] "No sandbox for pod can be found. 
Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.175300 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-internal-svc"
Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.176014 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-api-config-data"
Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.176426 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-public-svc"
Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.180323 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80b99eef-7d18-47ff-baa1-8666aaa0cd86-config-data\") pod \"cloudkitty-api-0\" (UID: \"80b99eef-7d18-47ff-baa1-8666aaa0cd86\") " pod="openstack/cloudkitty-api-0"
Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.180354 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80b99eef-7d18-47ff-baa1-8666aaa0cd86-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"80b99eef-7d18-47ff-baa1-8666aaa0cd86\") " pod="openstack/cloudkitty-api-0"
Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.180373 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/80b99eef-7d18-47ff-baa1-8666aaa0cd86-public-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"80b99eef-7d18-47ff-baa1-8666aaa0cd86\") " pod="openstack/cloudkitty-api-0"
Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.180419 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80b99eef-7d18-47ff-baa1-8666aaa0cd86-scripts\") pod \"cloudkitty-api-0\" (UID: \"80b99eef-7d18-47ff-baa1-8666aaa0cd86\") " pod="openstack/cloudkitty-api-0"
Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.180452 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/80b99eef-7d18-47ff-baa1-8666aaa0cd86-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"80b99eef-7d18-47ff-baa1-8666aaa0cd86\") " pod="openstack/cloudkitty-api-0"
Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.180475 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/80b99eef-7d18-47ff-baa1-8666aaa0cd86-logs\") pod \"cloudkitty-api-0\" (UID: \"80b99eef-7d18-47ff-baa1-8666aaa0cd86\") " pod="openstack/cloudkitty-api-0"
Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.180514 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/80b99eef-7d18-47ff-baa1-8666aaa0cd86-certs\") pod \"cloudkitty-api-0\" (UID: \"80b99eef-7d18-47ff-baa1-8666aaa0cd86\") " pod="openstack/cloudkitty-api-0"
Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.180655 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/80b99eef-7d18-47ff-baa1-8666aaa0cd86-internal-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"80b99eef-7d18-47ff-baa1-8666aaa0cd86\") " pod="openstack/cloudkitty-api-0"
\"80b99eef-7d18-47ff-baa1-8666aaa0cd86\") " pod="openstack/cloudkitty-api-0" Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.180704 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d6bpv\" (UniqueName: \"kubernetes.io/projected/80b99eef-7d18-47ff-baa1-8666aaa0cd86-kube-api-access-d6bpv\") pod \"cloudkitty-api-0\" (UID: \"80b99eef-7d18-47ff-baa1-8666aaa0cd86\") " pod="openstack/cloudkitty-api-0" Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.184415 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.282201 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80b99eef-7d18-47ff-baa1-8666aaa0cd86-scripts\") pod \"cloudkitty-api-0\" (UID: \"80b99eef-7d18-47ff-baa1-8666aaa0cd86\") " pod="openstack/cloudkitty-api-0" Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.282260 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/80b99eef-7d18-47ff-baa1-8666aaa0cd86-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"80b99eef-7d18-47ff-baa1-8666aaa0cd86\") " pod="openstack/cloudkitty-api-0" Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.282285 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/80b99eef-7d18-47ff-baa1-8666aaa0cd86-logs\") pod \"cloudkitty-api-0\" (UID: \"80b99eef-7d18-47ff-baa1-8666aaa0cd86\") " pod="openstack/cloudkitty-api-0" Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.282325 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/80b99eef-7d18-47ff-baa1-8666aaa0cd86-certs\") pod \"cloudkitty-api-0\" (UID: \"80b99eef-7d18-47ff-baa1-8666aaa0cd86\") " pod="openstack/cloudkitty-api-0" Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.282348 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/80b99eef-7d18-47ff-baa1-8666aaa0cd86-internal-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"80b99eef-7d18-47ff-baa1-8666aaa0cd86\") " pod="openstack/cloudkitty-api-0" Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.282397 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d6bpv\" (UniqueName: \"kubernetes.io/projected/80b99eef-7d18-47ff-baa1-8666aaa0cd86-kube-api-access-d6bpv\") pod \"cloudkitty-api-0\" (UID: \"80b99eef-7d18-47ff-baa1-8666aaa0cd86\") " pod="openstack/cloudkitty-api-0" Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.282426 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80b99eef-7d18-47ff-baa1-8666aaa0cd86-config-data\") pod \"cloudkitty-api-0\" (UID: \"80b99eef-7d18-47ff-baa1-8666aaa0cd86\") " pod="openstack/cloudkitty-api-0" Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.282443 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80b99eef-7d18-47ff-baa1-8666aaa0cd86-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"80b99eef-7d18-47ff-baa1-8666aaa0cd86\") " pod="openstack/cloudkitty-api-0" Dec 02 18:59:16 crc kubenswrapper[4792]: 
Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.286547 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/80b99eef-7d18-47ff-baa1-8666aaa0cd86-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"80b99eef-7d18-47ff-baa1-8666aaa0cd86\") " pod="openstack/cloudkitty-api-0"
Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.286689 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/80b99eef-7d18-47ff-baa1-8666aaa0cd86-public-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"80b99eef-7d18-47ff-baa1-8666aaa0cd86\") " pod="openstack/cloudkitty-api-0"
Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.287077 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/80b99eef-7d18-47ff-baa1-8666aaa0cd86-certs\") pod \"cloudkitty-api-0\" (UID: \"80b99eef-7d18-47ff-baa1-8666aaa0cd86\") " pod="openstack/cloudkitty-api-0"
Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.288417 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/80b99eef-7d18-47ff-baa1-8666aaa0cd86-logs\") pod \"cloudkitty-api-0\" (UID: \"80b99eef-7d18-47ff-baa1-8666aaa0cd86\") " pod="openstack/cloudkitty-api-0"
Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.294067 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80b99eef-7d18-47ff-baa1-8666aaa0cd86-scripts\") pod \"cloudkitty-api-0\" (UID: \"80b99eef-7d18-47ff-baa1-8666aaa0cd86\") " pod="openstack/cloudkitty-api-0"
Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.294658 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80b99eef-7d18-47ff-baa1-8666aaa0cd86-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"80b99eef-7d18-47ff-baa1-8666aaa0cd86\") " pod="openstack/cloudkitty-api-0"
Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.307000 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/80b99eef-7d18-47ff-baa1-8666aaa0cd86-internal-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"80b99eef-7d18-47ff-baa1-8666aaa0cd86\") " pod="openstack/cloudkitty-api-0"
Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.311363 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80b99eef-7d18-47ff-baa1-8666aaa0cd86-config-data\") pod \"cloudkitty-api-0\" (UID: \"80b99eef-7d18-47ff-baa1-8666aaa0cd86\") " pod="openstack/cloudkitty-api-0"
Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.329816 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d6bpv\" (UniqueName: \"kubernetes.io/projected/80b99eef-7d18-47ff-baa1-8666aaa0cd86-kube-api-access-d6bpv\") pod \"cloudkitty-api-0\" (UID: \"80b99eef-7d18-47ff-baa1-8666aaa0cd86\") " pod="openstack/cloudkitty-api-0"
Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.520511 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0"
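For the recreated pod the reconciler walks every declared volume through VerifyControllerAttachedVolume, "MountVolume started", and "MountVolume.SetUp succeeded": nine volumes, nine SetUp lines. A sketch to cross-check the SetUp entries against the pod spec, under the same kubeconfig and Python-client assumptions as above:

    # Sketch: print the volumes the pod declares, to cross-check against the
    # MountVolume.SetUp lines above (kubeconfig access assumed).
    from kubernetes import client, config

    config.load_kube_config()
    v1 = client.CoreV1Api()
    pod = v1.read_namespaced_pod("cloudkitty-api-0", "openstack")
    for vol in pod.spec.volumes:
        # each name here should have a matching "MountVolume.SetUp succeeded" entry
        print(vol.name)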
Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.556586 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-55bc995c96-sn8lv"
Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.609783 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-55bc995c96-sn8lv"
Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.839362 4792 generic.go:334] "Generic (PLEG): container finished" podID="63ce0ba8-1889-450a-a6a2-c55bba3e5dc4" containerID="354f6242c985cf11327b070139ec2b0feac5f5afaac005ad9aee7a2dcfd49850" exitCode=0
Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.839697 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"63ce0ba8-1889-450a-a6a2-c55bba3e5dc4","Type":"ContainerDied","Data":"354f6242c985cf11327b070139ec2b0feac5f5afaac005ad9aee7a2dcfd49850"}
Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.871409 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-proc-0"
Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.909118 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0"
Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.998436 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kdgpw\" (UniqueName: \"kubernetes.io/projected/63ce0ba8-1889-450a-a6a2-c55bba3e5dc4-kube-api-access-kdgpw\") pod \"63ce0ba8-1889-450a-a6a2-c55bba3e5dc4\" (UID: \"63ce0ba8-1889-450a-a6a2-c55bba3e5dc4\") "
Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.998613 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/63ce0ba8-1889-450a-a6a2-c55bba3e5dc4-scripts\") pod \"63ce0ba8-1889-450a-a6a2-c55bba3e5dc4\" (UID: \"63ce0ba8-1889-450a-a6a2-c55bba3e5dc4\") "
Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.998652 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/63ce0ba8-1889-450a-a6a2-c55bba3e5dc4-certs\") pod \"63ce0ba8-1889-450a-a6a2-c55bba3e5dc4\" (UID: \"63ce0ba8-1889-450a-a6a2-c55bba3e5dc4\") "
Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.998732 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/63ce0ba8-1889-450a-a6a2-c55bba3e5dc4-config-data-custom\") pod \"63ce0ba8-1889-450a-a6a2-c55bba3e5dc4\" (UID: \"63ce0ba8-1889-450a-a6a2-c55bba3e5dc4\") "
Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.998811 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63ce0ba8-1889-450a-a6a2-c55bba3e5dc4-combined-ca-bundle\") pod \"63ce0ba8-1889-450a-a6a2-c55bba3e5dc4\" (UID: \"63ce0ba8-1889-450a-a6a2-c55bba3e5dc4\") "
Dec 02 18:59:16 crc kubenswrapper[4792]: I1202 18:59:16.998977 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63ce0ba8-1889-450a-a6a2-c55bba3e5dc4-config-data\") pod \"63ce0ba8-1889-450a-a6a2-c55bba3e5dc4\" (UID: \"63ce0ba8-1889-450a-a6a2-c55bba3e5dc4\") "
Dec 02 18:59:17 crc kubenswrapper[4792]: I1202 18:59:17.006752 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63ce0ba8-1889-450a-a6a2-c55bba3e5dc4-scripts" (OuterVolumeSpecName: "scripts") pod "63ce0ba8-1889-450a-a6a2-c55bba3e5dc4" (UID: "63ce0ba8-1889-450a-a6a2-c55bba3e5dc4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 18:59:17 crc kubenswrapper[4792]: I1202 18:59:17.006945 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63ce0ba8-1889-450a-a6a2-c55bba3e5dc4-kube-api-access-kdgpw" (OuterVolumeSpecName: "kube-api-access-kdgpw") pod "63ce0ba8-1889-450a-a6a2-c55bba3e5dc4" (UID: "63ce0ba8-1889-450a-a6a2-c55bba3e5dc4"). InnerVolumeSpecName "kube-api-access-kdgpw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 18:59:17 crc kubenswrapper[4792]: I1202 18:59:17.009883 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63ce0ba8-1889-450a-a6a2-c55bba3e5dc4-certs" (OuterVolumeSpecName: "certs") pod "63ce0ba8-1889-450a-a6a2-c55bba3e5dc4" (UID: "63ce0ba8-1889-450a-a6a2-c55bba3e5dc4"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 18:59:17 crc kubenswrapper[4792]: I1202 18:59:17.021665 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63ce0ba8-1889-450a-a6a2-c55bba3e5dc4-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "63ce0ba8-1889-450a-a6a2-c55bba3e5dc4" (UID: "63ce0ba8-1889-450a-a6a2-c55bba3e5dc4"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 18:59:17 crc kubenswrapper[4792]: I1202 18:59:17.033873 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63ce0ba8-1889-450a-a6a2-c55bba3e5dc4-config-data" (OuterVolumeSpecName: "config-data") pod "63ce0ba8-1889-450a-a6a2-c55bba3e5dc4" (UID: "63ce0ba8-1889-450a-a6a2-c55bba3e5dc4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 18:59:17 crc kubenswrapper[4792]: I1202 18:59:17.056683 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63ce0ba8-1889-450a-a6a2-c55bba3e5dc4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "63ce0ba8-1889-450a-a6a2-c55bba3e5dc4" (UID: "63ce0ba8-1889-450a-a6a2-c55bba3e5dc4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:59:17 crc kubenswrapper[4792]: I1202 18:59:17.073341 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 02 18:59:17 crc kubenswrapper[4792]: I1202 18:59:17.102058 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kdgpw\" (UniqueName: \"kubernetes.io/projected/63ce0ba8-1889-450a-a6a2-c55bba3e5dc4-kube-api-access-kdgpw\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:17 crc kubenswrapper[4792]: I1202 18:59:17.102484 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/63ce0ba8-1889-450a-a6a2-c55bba3e5dc4-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:17 crc kubenswrapper[4792]: I1202 18:59:17.102542 4792 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/63ce0ba8-1889-450a-a6a2-c55bba3e5dc4-certs\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:17 crc kubenswrapper[4792]: I1202 18:59:17.102553 4792 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/63ce0ba8-1889-450a-a6a2-c55bba3e5dc4-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:17 crc kubenswrapper[4792]: I1202 18:59:17.102563 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63ce0ba8-1889-450a-a6a2-c55bba3e5dc4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:17 crc kubenswrapper[4792]: I1202 18:59:17.102573 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63ce0ba8-1889-450a-a6a2-c55bba3e5dc4-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:17 crc kubenswrapper[4792]: I1202 18:59:17.393168 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-54b9cc4f54-2nnzj" Dec 02 18:59:17 crc kubenswrapper[4792]: I1202 18:59:17.551530 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="876ebd0d-b0ea-4897-a4eb-d8b3acaf592a" path="/var/lib/kubelet/pods/876ebd0d-b0ea-4897-a4eb-d8b3acaf592a/volumes" Dec 02 18:59:17 crc kubenswrapper[4792]: I1202 18:59:17.851451 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"80b99eef-7d18-47ff-baa1-8666aaa0cd86","Type":"ContainerStarted","Data":"8986d95c722cf9982017b116352adf82f1d6b26bb936c977f371832d9ca9bf27"} Dec 02 18:59:17 crc kubenswrapper[4792]: I1202 18:59:17.852704 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"80b99eef-7d18-47ff-baa1-8666aaa0cd86","Type":"ContainerStarted","Data":"81d4ab47247a11dfb4482e877a0ac3a23bfe9e69a52046d67402330cedf913c6"} Dec 02 18:59:17 crc kubenswrapper[4792]: I1202 18:59:17.852815 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-api-0" Dec 02 18:59:17 crc kubenswrapper[4792]: I1202 18:59:17.852895 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"80b99eef-7d18-47ff-baa1-8666aaa0cd86","Type":"ContainerStarted","Data":"727a4e3a563c760e9a31de51e471cd7b4a01c16a0704278abb65b97298f9fcf5"} Dec 02 18:59:17 crc kubenswrapper[4792]: I1202 18:59:17.853686 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" 
event={"ID":"63ce0ba8-1889-450a-a6a2-c55bba3e5dc4","Type":"ContainerDied","Data":"f676bb1fc7746f9168749d10f30acada14baeb9f5bda53d3bb16949619c6d0be"} Dec 02 18:59:17 crc kubenswrapper[4792]: I1202 18:59:17.853739 4792 scope.go:117] "RemoveContainer" containerID="354f6242c985cf11327b070139ec2b0feac5f5afaac005ad9aee7a2dcfd49850" Dec 02 18:59:17 crc kubenswrapper[4792]: I1202 18:59:17.853863 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-proc-0" Dec 02 18:59:17 crc kubenswrapper[4792]: I1202 18:59:17.894103 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-api-0" podStartSLOduration=1.89408303 podStartE2EDuration="1.89408303s" podCreationTimestamp="2025-12-02 18:59:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:59:17.869921574 +0000 UTC m=+1388.642813902" watchObservedRunningTime="2025-12-02 18:59:17.89408303 +0000 UTC m=+1388.666975368" Dec 02 18:59:17 crc kubenswrapper[4792]: I1202 18:59:17.906004 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 02 18:59:17 crc kubenswrapper[4792]: I1202 18:59:17.914303 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 02 18:59:17 crc kubenswrapper[4792]: I1202 18:59:17.924195 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 02 18:59:17 crc kubenswrapper[4792]: E1202 18:59:17.924671 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63ce0ba8-1889-450a-a6a2-c55bba3e5dc4" containerName="cloudkitty-proc" Dec 02 18:59:17 crc kubenswrapper[4792]: I1202 18:59:17.924688 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="63ce0ba8-1889-450a-a6a2-c55bba3e5dc4" containerName="cloudkitty-proc" Dec 02 18:59:17 crc kubenswrapper[4792]: I1202 18:59:17.924894 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="63ce0ba8-1889-450a-a6a2-c55bba3e5dc4" containerName="cloudkitty-proc" Dec 02 18:59:17 crc kubenswrapper[4792]: I1202 18:59:17.925687 4792 util.go:30] "No sandbox for pod can be found. 
Dec 02 18:59:17 crc kubenswrapper[4792]: I1202 18:59:17.929019 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-proc-config-data"
Dec 02 18:59:17 crc kubenswrapper[4792]: I1202 18:59:17.932706 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-proc-0"]
Dec 02 18:59:18 crc kubenswrapper[4792]: I1202 18:59:18.028856 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e89728d-8577-48e0-9452-0bd22ad3ff6d-config-data\") pod \"cloudkitty-proc-0\" (UID: \"4e89728d-8577-48e0-9452-0bd22ad3ff6d\") " pod="openstack/cloudkitty-proc-0"
Dec 02 18:59:18 crc kubenswrapper[4792]: I1202 18:59:18.029053 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e89728d-8577-48e0-9452-0bd22ad3ff6d-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"4e89728d-8577-48e0-9452-0bd22ad3ff6d\") " pod="openstack/cloudkitty-proc-0"
Dec 02 18:59:18 crc kubenswrapper[4792]: I1202 18:59:18.029201 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r7dhr\" (UniqueName: \"kubernetes.io/projected/4e89728d-8577-48e0-9452-0bd22ad3ff6d-kube-api-access-r7dhr\") pod \"cloudkitty-proc-0\" (UID: \"4e89728d-8577-48e0-9452-0bd22ad3ff6d\") " pod="openstack/cloudkitty-proc-0"
Dec 02 18:59:18 crc kubenswrapper[4792]: I1202 18:59:18.029297 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4e89728d-8577-48e0-9452-0bd22ad3ff6d-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"4e89728d-8577-48e0-9452-0bd22ad3ff6d\") " pod="openstack/cloudkitty-proc-0"
Dec 02 18:59:18 crc kubenswrapper[4792]: I1202 18:59:18.029384 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/4e89728d-8577-48e0-9452-0bd22ad3ff6d-certs\") pod \"cloudkitty-proc-0\" (UID: \"4e89728d-8577-48e0-9452-0bd22ad3ff6d\") " pod="openstack/cloudkitty-proc-0"
Dec 02 18:59:18 crc kubenswrapper[4792]: I1202 18:59:18.029465 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e89728d-8577-48e0-9452-0bd22ad3ff6d-scripts\") pod \"cloudkitty-proc-0\" (UID: \"4e89728d-8577-48e0-9452-0bd22ad3ff6d\") " pod="openstack/cloudkitty-proc-0"
Dec 02 18:59:18 crc kubenswrapper[4792]: I1202 18:59:18.131809 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r7dhr\" (UniqueName: \"kubernetes.io/projected/4e89728d-8577-48e0-9452-0bd22ad3ff6d-kube-api-access-r7dhr\") pod \"cloudkitty-proc-0\" (UID: \"4e89728d-8577-48e0-9452-0bd22ad3ff6d\") " pod="openstack/cloudkitty-proc-0"
Dec 02 18:59:18 crc kubenswrapper[4792]: I1202 18:59:18.132172 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4e89728d-8577-48e0-9452-0bd22ad3ff6d-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"4e89728d-8577-48e0-9452-0bd22ad3ff6d\") " pod="openstack/cloudkitty-proc-0"
Dec 02 18:59:18 crc kubenswrapper[4792]: I1202 18:59:18.132313 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/4e89728d-8577-48e0-9452-0bd22ad3ff6d-certs\") pod \"cloudkitty-proc-0\" (UID: \"4e89728d-8577-48e0-9452-0bd22ad3ff6d\") " pod="openstack/cloudkitty-proc-0"
"operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/4e89728d-8577-48e0-9452-0bd22ad3ff6d-certs\") pod \"cloudkitty-proc-0\" (UID: \"4e89728d-8577-48e0-9452-0bd22ad3ff6d\") " pod="openstack/cloudkitty-proc-0" Dec 02 18:59:18 crc kubenswrapper[4792]: I1202 18:59:18.132413 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e89728d-8577-48e0-9452-0bd22ad3ff6d-scripts\") pod \"cloudkitty-proc-0\" (UID: \"4e89728d-8577-48e0-9452-0bd22ad3ff6d\") " pod="openstack/cloudkitty-proc-0" Dec 02 18:59:18 crc kubenswrapper[4792]: I1202 18:59:18.132720 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e89728d-8577-48e0-9452-0bd22ad3ff6d-config-data\") pod \"cloudkitty-proc-0\" (UID: \"4e89728d-8577-48e0-9452-0bd22ad3ff6d\") " pod="openstack/cloudkitty-proc-0" Dec 02 18:59:18 crc kubenswrapper[4792]: I1202 18:59:18.133251 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e89728d-8577-48e0-9452-0bd22ad3ff6d-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"4e89728d-8577-48e0-9452-0bd22ad3ff6d\") " pod="openstack/cloudkitty-proc-0" Dec 02 18:59:18 crc kubenswrapper[4792]: I1202 18:59:18.143370 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e89728d-8577-48e0-9452-0bd22ad3ff6d-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"4e89728d-8577-48e0-9452-0bd22ad3ff6d\") " pod="openstack/cloudkitty-proc-0" Dec 02 18:59:18 crc kubenswrapper[4792]: I1202 18:59:18.143707 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e89728d-8577-48e0-9452-0bd22ad3ff6d-scripts\") pod \"cloudkitty-proc-0\" (UID: \"4e89728d-8577-48e0-9452-0bd22ad3ff6d\") " pod="openstack/cloudkitty-proc-0" Dec 02 18:59:18 crc kubenswrapper[4792]: I1202 18:59:18.144408 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e89728d-8577-48e0-9452-0bd22ad3ff6d-config-data\") pod \"cloudkitty-proc-0\" (UID: \"4e89728d-8577-48e0-9452-0bd22ad3ff6d\") " pod="openstack/cloudkitty-proc-0" Dec 02 18:59:18 crc kubenswrapper[4792]: I1202 18:59:18.144876 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/4e89728d-8577-48e0-9452-0bd22ad3ff6d-certs\") pod \"cloudkitty-proc-0\" (UID: \"4e89728d-8577-48e0-9452-0bd22ad3ff6d\") " pod="openstack/cloudkitty-proc-0" Dec 02 18:59:18 crc kubenswrapper[4792]: I1202 18:59:18.154311 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4e89728d-8577-48e0-9452-0bd22ad3ff6d-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"4e89728d-8577-48e0-9452-0bd22ad3ff6d\") " pod="openstack/cloudkitty-proc-0" Dec 02 18:59:18 crc kubenswrapper[4792]: I1202 18:59:18.158005 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r7dhr\" (UniqueName: \"kubernetes.io/projected/4e89728d-8577-48e0-9452-0bd22ad3ff6d-kube-api-access-r7dhr\") pod \"cloudkitty-proc-0\" (UID: \"4e89728d-8577-48e0-9452-0bd22ad3ff6d\") " pod="openstack/cloudkitty-proc-0" Dec 02 18:59:18 crc kubenswrapper[4792]: I1202 18:59:18.315137 4792 util.go:30] "No 
Dec 02 18:59:18 crc kubenswrapper[4792]: I1202 18:59:18.637186 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"]
Dec 02 18:59:18 crc kubenswrapper[4792]: I1202 18:59:18.638746 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Dec 02 18:59:18 crc kubenswrapper[4792]: I1202 18:59:18.640193 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-t5r7p"
Dec 02 18:59:18 crc kubenswrapper[4792]: I1202 18:59:18.640371 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret"
Dec 02 18:59:18 crc kubenswrapper[4792]: I1202 18:59:18.640424 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config"
Dec 02 18:59:18 crc kubenswrapper[4792]: I1202 18:59:18.648653 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"]
Dec 02 18:59:18 crc kubenswrapper[4792]: I1202 18:59:18.742814 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b36ffc08-785b-49a8-9b63-ca144980a780-combined-ca-bundle\") pod \"openstackclient\" (UID: \"b36ffc08-785b-49a8-9b63-ca144980a780\") " pod="openstack/openstackclient"
Dec 02 18:59:18 crc kubenswrapper[4792]: I1202 18:59:18.742866 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r9d7r\" (UniqueName: \"kubernetes.io/projected/b36ffc08-785b-49a8-9b63-ca144980a780-kube-api-access-r9d7r\") pod \"openstackclient\" (UID: \"b36ffc08-785b-49a8-9b63-ca144980a780\") " pod="openstack/openstackclient"
Dec 02 18:59:18 crc kubenswrapper[4792]: I1202 18:59:18.743173 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/b36ffc08-785b-49a8-9b63-ca144980a780-openstack-config\") pod \"openstackclient\" (UID: \"b36ffc08-785b-49a8-9b63-ca144980a780\") " pod="openstack/openstackclient"
Dec 02 18:59:18 crc kubenswrapper[4792]: I1202 18:59:18.743460 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/b36ffc08-785b-49a8-9b63-ca144980a780-openstack-config-secret\") pod \"openstackclient\" (UID: \"b36ffc08-785b-49a8-9b63-ca144980a780\") " pod="openstack/openstackclient"
Dec 02 18:59:18 crc kubenswrapper[4792]: I1202 18:59:18.845162 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/b36ffc08-785b-49a8-9b63-ca144980a780-openstack-config\") pod \"openstackclient\" (UID: \"b36ffc08-785b-49a8-9b63-ca144980a780\") " pod="openstack/openstackclient"
Dec 02 18:59:18 crc kubenswrapper[4792]: I1202 18:59:18.845694 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/b36ffc08-785b-49a8-9b63-ca144980a780-openstack-config-secret\") pod \"openstackclient\" (UID: \"b36ffc08-785b-49a8-9b63-ca144980a780\") " pod="openstack/openstackclient"
Dec 02 18:59:18 crc kubenswrapper[4792]: I1202 18:59:18.845720 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b36ffc08-785b-49a8-9b63-ca144980a780-combined-ca-bundle\") pod \"openstackclient\" (UID: \"b36ffc08-785b-49a8-9b63-ca144980a780\") " pod="openstack/openstackclient"
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b36ffc08-785b-49a8-9b63-ca144980a780-combined-ca-bundle\") pod \"openstackclient\" (UID: \"b36ffc08-785b-49a8-9b63-ca144980a780\") " pod="openstack/openstackclient" Dec 02 18:59:18 crc kubenswrapper[4792]: I1202 18:59:18.845746 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r9d7r\" (UniqueName: \"kubernetes.io/projected/b36ffc08-785b-49a8-9b63-ca144980a780-kube-api-access-r9d7r\") pod \"openstackclient\" (UID: \"b36ffc08-785b-49a8-9b63-ca144980a780\") " pod="openstack/openstackclient" Dec 02 18:59:18 crc kubenswrapper[4792]: I1202 18:59:18.846431 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/b36ffc08-785b-49a8-9b63-ca144980a780-openstack-config\") pod \"openstackclient\" (UID: \"b36ffc08-785b-49a8-9b63-ca144980a780\") " pod="openstack/openstackclient" Dec 02 18:59:18 crc kubenswrapper[4792]: I1202 18:59:18.851575 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/b36ffc08-785b-49a8-9b63-ca144980a780-openstack-config-secret\") pod \"openstackclient\" (UID: \"b36ffc08-785b-49a8-9b63-ca144980a780\") " pod="openstack/openstackclient" Dec 02 18:59:18 crc kubenswrapper[4792]: I1202 18:59:18.853172 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b36ffc08-785b-49a8-9b63-ca144980a780-combined-ca-bundle\") pod \"openstackclient\" (UID: \"b36ffc08-785b-49a8-9b63-ca144980a780\") " pod="openstack/openstackclient" Dec 02 18:59:18 crc kubenswrapper[4792]: I1202 18:59:18.875050 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r9d7r\" (UniqueName: \"kubernetes.io/projected/b36ffc08-785b-49a8-9b63-ca144980a780-kube-api-access-r9d7r\") pod \"openstackclient\" (UID: \"b36ffc08-785b-49a8-9b63-ca144980a780\") " pod="openstack/openstackclient" Dec 02 18:59:18 crc kubenswrapper[4792]: I1202 18:59:18.877387 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 02 18:59:18 crc kubenswrapper[4792]: I1202 18:59:18.899601 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Dec 02 18:59:18 crc kubenswrapper[4792]: I1202 18:59:18.900471 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 02 18:59:18 crc kubenswrapper[4792]: I1202 18:59:18.909696 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Dec 02 18:59:19 crc kubenswrapper[4792]: I1202 18:59:19.021567 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 02 18:59:19 crc kubenswrapper[4792]: I1202 18:59:19.022929 4792 util.go:30] "No sandbox for pod can be found. 
Dec 02 18:59:19 crc kubenswrapper[4792]: I1202 18:59:19.073654 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"]
Dec 02 18:59:19 crc kubenswrapper[4792]: I1202 18:59:19.120833 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cd57b\" (UniqueName: \"kubernetes.io/projected/5703c717-1bce-4ccc-aff7-16c5fe72e724-kube-api-access-cd57b\") pod \"openstackclient\" (UID: \"5703c717-1bce-4ccc-aff7-16c5fe72e724\") " pod="openstack/openstackclient"
Dec 02 18:59:19 crc kubenswrapper[4792]: I1202 18:59:19.120903 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5703c717-1bce-4ccc-aff7-16c5fe72e724-openstack-config-secret\") pod \"openstackclient\" (UID: \"5703c717-1bce-4ccc-aff7-16c5fe72e724\") " pod="openstack/openstackclient"
Dec 02 18:59:19 crc kubenswrapper[4792]: I1202 18:59:19.120933 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5703c717-1bce-4ccc-aff7-16c5fe72e724-openstack-config\") pod \"openstackclient\" (UID: \"5703c717-1bce-4ccc-aff7-16c5fe72e724\") " pod="openstack/openstackclient"
Dec 02 18:59:19 crc kubenswrapper[4792]: I1202 18:59:19.120974 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5703c717-1bce-4ccc-aff7-16c5fe72e724-combined-ca-bundle\") pod \"openstackclient\" (UID: \"5703c717-1bce-4ccc-aff7-16c5fe72e724\") " pod="openstack/openstackclient"
Dec 02 18:59:19 crc kubenswrapper[4792]: I1202 18:59:19.231684 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5703c717-1bce-4ccc-aff7-16c5fe72e724-combined-ca-bundle\") pod \"openstackclient\" (UID: \"5703c717-1bce-4ccc-aff7-16c5fe72e724\") " pod="openstack/openstackclient"
Dec 02 18:59:19 crc kubenswrapper[4792]: I1202 18:59:19.232081 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cd57b\" (UniqueName: \"kubernetes.io/projected/5703c717-1bce-4ccc-aff7-16c5fe72e724-kube-api-access-cd57b\") pod \"openstackclient\" (UID: \"5703c717-1bce-4ccc-aff7-16c5fe72e724\") " pod="openstack/openstackclient"
Dec 02 18:59:19 crc kubenswrapper[4792]: I1202 18:59:19.232113 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5703c717-1bce-4ccc-aff7-16c5fe72e724-openstack-config-secret\") pod \"openstackclient\" (UID: \"5703c717-1bce-4ccc-aff7-16c5fe72e724\") " pod="openstack/openstackclient"
Dec 02 18:59:19 crc kubenswrapper[4792]: I1202 18:59:19.232134 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5703c717-1bce-4ccc-aff7-16c5fe72e724-openstack-config\") pod \"openstackclient\" (UID: \"5703c717-1bce-4ccc-aff7-16c5fe72e724\") " pod="openstack/openstackclient"
Dec 02 18:59:19 crc kubenswrapper[4792]: I1202 18:59:19.232893 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5703c717-1bce-4ccc-aff7-16c5fe72e724-openstack-config\") pod \"openstackclient\" (UID: \"5703c717-1bce-4ccc-aff7-16c5fe72e724\") " pod="openstack/openstackclient"
\"5703c717-1bce-4ccc-aff7-16c5fe72e724\") " pod="openstack/openstackclient" Dec 02 18:59:19 crc kubenswrapper[4792]: I1202 18:59:19.252257 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5703c717-1bce-4ccc-aff7-16c5fe72e724-openstack-config-secret\") pod \"openstackclient\" (UID: \"5703c717-1bce-4ccc-aff7-16c5fe72e724\") " pod="openstack/openstackclient" Dec 02 18:59:19 crc kubenswrapper[4792]: I1202 18:59:19.252410 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5703c717-1bce-4ccc-aff7-16c5fe72e724-combined-ca-bundle\") pod \"openstackclient\" (UID: \"5703c717-1bce-4ccc-aff7-16c5fe72e724\") " pod="openstack/openstackclient" Dec 02 18:59:19 crc kubenswrapper[4792]: I1202 18:59:19.286757 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cd57b\" (UniqueName: \"kubernetes.io/projected/5703c717-1bce-4ccc-aff7-16c5fe72e724-kube-api-access-cd57b\") pod \"openstackclient\" (UID: \"5703c717-1bce-4ccc-aff7-16c5fe72e724\") " pod="openstack/openstackclient" Dec 02 18:59:19 crc kubenswrapper[4792]: E1202 18:59:19.304669 4792 log.go:32] "RunPodSandbox from runtime service failed" err=< Dec 02 18:59:19 crc kubenswrapper[4792]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_openstackclient_openstack_b36ffc08-785b-49a8-9b63-ca144980a780_0(703570f1822d4f9dc9427eeff4fd2cf5119e739405710c49d99395b15ec9411f): error adding pod openstack_openstackclient to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"703570f1822d4f9dc9427eeff4fd2cf5119e739405710c49d99395b15ec9411f" Netns:"/var/run/netns/67b778cf-0351-4936-8a63-7ffde6972b4f" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openstack;K8S_POD_NAME=openstackclient;K8S_POD_INFRA_CONTAINER_ID=703570f1822d4f9dc9427eeff4fd2cf5119e739405710c49d99395b15ec9411f;K8S_POD_UID=b36ffc08-785b-49a8-9b63-ca144980a780" Path:"" ERRORED: error configuring pod [openstack/openstackclient] networking: Multus: [openstack/openstackclient/b36ffc08-785b-49a8-9b63-ca144980a780]: expected pod UID "b36ffc08-785b-49a8-9b63-ca144980a780" but got "5703c717-1bce-4ccc-aff7-16c5fe72e724" from Kube API Dec 02 18:59:19 crc kubenswrapper[4792]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 02 18:59:19 crc kubenswrapper[4792]: > Dec 02 18:59:19 crc kubenswrapper[4792]: E1202 18:59:19.304741 4792 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err=< Dec 02 18:59:19 crc kubenswrapper[4792]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_openstackclient_openstack_b36ffc08-785b-49a8-9b63-ca144980a780_0(703570f1822d4f9dc9427eeff4fd2cf5119e739405710c49d99395b15ec9411f): error adding pod openstack_openstackclient to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"703570f1822d4f9dc9427eeff4fd2cf5119e739405710c49d99395b15ec9411f" 
Netns:"/var/run/netns/67b778cf-0351-4936-8a63-7ffde6972b4f" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openstack;K8S_POD_NAME=openstackclient;K8S_POD_INFRA_CONTAINER_ID=703570f1822d4f9dc9427eeff4fd2cf5119e739405710c49d99395b15ec9411f;K8S_POD_UID=b36ffc08-785b-49a8-9b63-ca144980a780" Path:"" ERRORED: error configuring pod [openstack/openstackclient] networking: Multus: [openstack/openstackclient/b36ffc08-785b-49a8-9b63-ca144980a780]: expected pod UID "b36ffc08-785b-49a8-9b63-ca144980a780" but got "5703c717-1bce-4ccc-aff7-16c5fe72e724" from Kube API Dec 02 18:59:19 crc kubenswrapper[4792]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 02 18:59:19 crc kubenswrapper[4792]: > pod="openstack/openstackclient" Dec 02 18:59:19 crc kubenswrapper[4792]: I1202 18:59:19.417662 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 02 18:59:19 crc kubenswrapper[4792]: I1202 18:59:19.553553 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="63ce0ba8-1889-450a-a6a2-c55bba3e5dc4" path="/var/lib/kubelet/pods/63ce0ba8-1889-450a-a6a2-c55bba3e5dc4/volumes" Dec 02 18:59:19 crc kubenswrapper[4792]: I1202 18:59:19.873392 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 02 18:59:19 crc kubenswrapper[4792]: I1202 18:59:19.874557 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"4e89728d-8577-48e0-9452-0bd22ad3ff6d","Type":"ContainerStarted","Data":"bbaa7b074942b09d92fd90a7481b2a1369c9c719e1ff5cfcb1560b06b1657a81"} Dec 02 18:59:19 crc kubenswrapper[4792]: I1202 18:59:19.874585 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"4e89728d-8577-48e0-9452-0bd22ad3ff6d","Type":"ContainerStarted","Data":"5bed5a34fe6db6cbe5f33993b8c814af1a5e0d0ef5759db1dabf2aa25a1b39e5"} Dec 02 18:59:19 crc kubenswrapper[4792]: I1202 18:59:19.887940 4792 util.go:30] "No sandbox for pod can be found. 
Dec 02 18:59:19 crc kubenswrapper[4792]: I1202 18:59:19.906768 4792 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="b36ffc08-785b-49a8-9b63-ca144980a780" podUID="5703c717-1bce-4ccc-aff7-16c5fe72e724"
Dec 02 18:59:19 crc kubenswrapper[4792]: W1202 18:59:19.928168 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5703c717_1bce_4ccc_aff7_16c5fe72e724.slice/crio-5b7cf5dbc02d9ab3b5df6f40f81e9dc2fd8f3f60ed97a7c17e9ebf2a482bb2e4 WatchSource:0}: Error finding container 5b7cf5dbc02d9ab3b5df6f40f81e9dc2fd8f3f60ed97a7c17e9ebf2a482bb2e4: Status 404 returned error can't find the container with id 5b7cf5dbc02d9ab3b5df6f40f81e9dc2fd8f3f60ed97a7c17e9ebf2a482bb2e4
Dec 02 18:59:19 crc kubenswrapper[4792]: I1202 18:59:19.931347 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-proc-0" podStartSLOduration=2.931323289 podStartE2EDuration="2.931323289s" podCreationTimestamp="2025-12-02 18:59:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:59:19.901206379 +0000 UTC m=+1390.674098717" watchObservedRunningTime="2025-12-02 18:59:19.931323289 +0000 UTC m=+1390.704215617"
Dec 02 18:59:19 crc kubenswrapper[4792]: I1202 18:59:19.936097 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"]
Dec 02 18:59:19 crc kubenswrapper[4792]: I1202 18:59:19.946368 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/b36ffc08-785b-49a8-9b63-ca144980a780-openstack-config\") pod \"b36ffc08-785b-49a8-9b63-ca144980a780\" (UID: \"b36ffc08-785b-49a8-9b63-ca144980a780\") "
Dec 02 18:59:19 crc kubenswrapper[4792]: I1202 18:59:19.946620 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/b36ffc08-785b-49a8-9b63-ca144980a780-openstack-config-secret\") pod \"b36ffc08-785b-49a8-9b63-ca144980a780\" (UID: \"b36ffc08-785b-49a8-9b63-ca144980a780\") "
Dec 02 18:59:19 crc kubenswrapper[4792]: I1202 18:59:19.946765 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b36ffc08-785b-49a8-9b63-ca144980a780-combined-ca-bundle\") pod \"b36ffc08-785b-49a8-9b63-ca144980a780\" (UID: \"b36ffc08-785b-49a8-9b63-ca144980a780\") "
Dec 02 18:59:19 crc kubenswrapper[4792]: I1202 18:59:19.946853 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r9d7r\" (UniqueName: \"kubernetes.io/projected/b36ffc08-785b-49a8-9b63-ca144980a780-kube-api-access-r9d7r\") pod \"b36ffc08-785b-49a8-9b63-ca144980a780\" (UID: \"b36ffc08-785b-49a8-9b63-ca144980a780\") "
Dec 02 18:59:19 crc kubenswrapper[4792]: I1202 18:59:19.947122 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b36ffc08-785b-49a8-9b63-ca144980a780-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "b36ffc08-785b-49a8-9b63-ca144980a780" (UID: "b36ffc08-785b-49a8-9b63-ca144980a780"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:59:19 crc kubenswrapper[4792]: I1202 18:59:19.947385 4792 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/b36ffc08-785b-49a8-9b63-ca144980a780-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:19 crc kubenswrapper[4792]: I1202 18:59:19.952763 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b36ffc08-785b-49a8-9b63-ca144980a780-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b36ffc08-785b-49a8-9b63-ca144980a780" (UID: "b36ffc08-785b-49a8-9b63-ca144980a780"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:59:19 crc kubenswrapper[4792]: I1202 18:59:19.952808 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b36ffc08-785b-49a8-9b63-ca144980a780-kube-api-access-r9d7r" (OuterVolumeSpecName: "kube-api-access-r9d7r") pod "b36ffc08-785b-49a8-9b63-ca144980a780" (UID: "b36ffc08-785b-49a8-9b63-ca144980a780"). InnerVolumeSpecName "kube-api-access-r9d7r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:59:19 crc kubenswrapper[4792]: I1202 18:59:19.959586 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b36ffc08-785b-49a8-9b63-ca144980a780-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "b36ffc08-785b-49a8-9b63-ca144980a780" (UID: "b36ffc08-785b-49a8-9b63-ca144980a780"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:59:20 crc kubenswrapper[4792]: I1202 18:59:20.050133 4792 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/b36ffc08-785b-49a8-9b63-ca144980a780-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:20 crc kubenswrapper[4792]: I1202 18:59:20.050171 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b36ffc08-785b-49a8-9b63-ca144980a780-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:20 crc kubenswrapper[4792]: I1202 18:59:20.050180 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r9d7r\" (UniqueName: \"kubernetes.io/projected/b36ffc08-785b-49a8-9b63-ca144980a780-kube-api-access-r9d7r\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:20 crc kubenswrapper[4792]: I1202 18:59:20.307701 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-67bdc55879-c9bv9" Dec 02 18:59:20 crc kubenswrapper[4792]: I1202 18:59:20.388629 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-g5f72"] Dec 02 18:59:20 crc kubenswrapper[4792]: I1202 18:59:20.388843 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c9776ccc5-g5f72" podUID="d440050a-59d7-4ea9-95a3-ca0b3beff640" containerName="dnsmasq-dns" containerID="cri-o://45e1fbbb485affa2365cd04248d2f3c1cb5430f0170aeea240b30792e8dc840c" gracePeriod=10 Dec 02 18:59:20 crc kubenswrapper[4792]: I1202 18:59:20.411701 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 02 18:59:20 crc kubenswrapper[4792]: I1202 18:59:20.927817 4792 generic.go:334] "Generic (PLEG): container finished" 
podID="d440050a-59d7-4ea9-95a3-ca0b3beff640" containerID="45e1fbbb485affa2365cd04248d2f3c1cb5430f0170aeea240b30792e8dc840c" exitCode=0 Dec 02 18:59:20 crc kubenswrapper[4792]: I1202 18:59:20.928208 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-g5f72" event={"ID":"d440050a-59d7-4ea9-95a3-ca0b3beff640","Type":"ContainerDied","Data":"45e1fbbb485affa2365cd04248d2f3c1cb5430f0170aeea240b30792e8dc840c"} Dec 02 18:59:20 crc kubenswrapper[4792]: I1202 18:59:20.938112 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 02 18:59:20 crc kubenswrapper[4792]: I1202 18:59:20.938588 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"5703c717-1bce-4ccc-aff7-16c5fe72e724","Type":"ContainerStarted","Data":"5b7cf5dbc02d9ab3b5df6f40f81e9dc2fd8f3f60ed97a7c17e9ebf2a482bb2e4"} Dec 02 18:59:20 crc kubenswrapper[4792]: I1202 18:59:20.979843 4792 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="b36ffc08-785b-49a8-9b63-ca144980a780" podUID="5703c717-1bce-4ccc-aff7-16c5fe72e724" Dec 02 18:59:21 crc kubenswrapper[4792]: I1202 18:59:21.341305 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-g5f72" Dec 02 18:59:21 crc kubenswrapper[4792]: I1202 18:59:21.505700 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d440050a-59d7-4ea9-95a3-ca0b3beff640-dns-svc\") pod \"d440050a-59d7-4ea9-95a3-ca0b3beff640\" (UID: \"d440050a-59d7-4ea9-95a3-ca0b3beff640\") " Dec 02 18:59:21 crc kubenswrapper[4792]: I1202 18:59:21.505771 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d440050a-59d7-4ea9-95a3-ca0b3beff640-ovsdbserver-sb\") pod \"d440050a-59d7-4ea9-95a3-ca0b3beff640\" (UID: \"d440050a-59d7-4ea9-95a3-ca0b3beff640\") " Dec 02 18:59:21 crc kubenswrapper[4792]: I1202 18:59:21.505839 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d440050a-59d7-4ea9-95a3-ca0b3beff640-ovsdbserver-nb\") pod \"d440050a-59d7-4ea9-95a3-ca0b3beff640\" (UID: \"d440050a-59d7-4ea9-95a3-ca0b3beff640\") " Dec 02 18:59:21 crc kubenswrapper[4792]: I1202 18:59:21.505942 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gs7l2\" (UniqueName: \"kubernetes.io/projected/d440050a-59d7-4ea9-95a3-ca0b3beff640-kube-api-access-gs7l2\") pod \"d440050a-59d7-4ea9-95a3-ca0b3beff640\" (UID: \"d440050a-59d7-4ea9-95a3-ca0b3beff640\") " Dec 02 18:59:21 crc kubenswrapper[4792]: I1202 18:59:21.505980 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d440050a-59d7-4ea9-95a3-ca0b3beff640-config\") pod \"d440050a-59d7-4ea9-95a3-ca0b3beff640\" (UID: \"d440050a-59d7-4ea9-95a3-ca0b3beff640\") " Dec 02 18:59:21 crc kubenswrapper[4792]: I1202 18:59:21.506002 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d440050a-59d7-4ea9-95a3-ca0b3beff640-dns-swift-storage-0\") pod \"d440050a-59d7-4ea9-95a3-ca0b3beff640\" (UID: \"d440050a-59d7-4ea9-95a3-ca0b3beff640\") " Dec 02 18:59:21 crc kubenswrapper[4792]: I1202 
18:59:21.541697 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d440050a-59d7-4ea9-95a3-ca0b3beff640-kube-api-access-gs7l2" (OuterVolumeSpecName: "kube-api-access-gs7l2") pod "d440050a-59d7-4ea9-95a3-ca0b3beff640" (UID: "d440050a-59d7-4ea9-95a3-ca0b3beff640"). InnerVolumeSpecName "kube-api-access-gs7l2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:59:21 crc kubenswrapper[4792]: I1202 18:59:21.571005 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b36ffc08-785b-49a8-9b63-ca144980a780" path="/var/lib/kubelet/pods/b36ffc08-785b-49a8-9b63-ca144980a780/volumes" Dec 02 18:59:21 crc kubenswrapper[4792]: I1202 18:59:21.608397 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gs7l2\" (UniqueName: \"kubernetes.io/projected/d440050a-59d7-4ea9-95a3-ca0b3beff640-kube-api-access-gs7l2\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:21 crc kubenswrapper[4792]: I1202 18:59:21.619434 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d440050a-59d7-4ea9-95a3-ca0b3beff640-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d440050a-59d7-4ea9-95a3-ca0b3beff640" (UID: "d440050a-59d7-4ea9-95a3-ca0b3beff640"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:59:21 crc kubenswrapper[4792]: I1202 18:59:21.658053 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d440050a-59d7-4ea9-95a3-ca0b3beff640-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "d440050a-59d7-4ea9-95a3-ca0b3beff640" (UID: "d440050a-59d7-4ea9-95a3-ca0b3beff640"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:59:21 crc kubenswrapper[4792]: I1202 18:59:21.696161 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d440050a-59d7-4ea9-95a3-ca0b3beff640-config" (OuterVolumeSpecName: "config") pod "d440050a-59d7-4ea9-95a3-ca0b3beff640" (UID: "d440050a-59d7-4ea9-95a3-ca0b3beff640"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:59:21 crc kubenswrapper[4792]: I1202 18:59:21.700887 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d440050a-59d7-4ea9-95a3-ca0b3beff640-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d440050a-59d7-4ea9-95a3-ca0b3beff640" (UID: "d440050a-59d7-4ea9-95a3-ca0b3beff640"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:59:21 crc kubenswrapper[4792]: I1202 18:59:21.717271 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d440050a-59d7-4ea9-95a3-ca0b3beff640-config\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:21 crc kubenswrapper[4792]: I1202 18:59:21.717307 4792 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d440050a-59d7-4ea9-95a3-ca0b3beff640-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:21 crc kubenswrapper[4792]: I1202 18:59:21.717318 4792 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d440050a-59d7-4ea9-95a3-ca0b3beff640-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:21 crc kubenswrapper[4792]: I1202 18:59:21.717326 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d440050a-59d7-4ea9-95a3-ca0b3beff640-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:21 crc kubenswrapper[4792]: I1202 18:59:21.742815 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d440050a-59d7-4ea9-95a3-ca0b3beff640-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d440050a-59d7-4ea9-95a3-ca0b3beff640" (UID: "d440050a-59d7-4ea9-95a3-ca0b3beff640"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:59:21 crc kubenswrapper[4792]: I1202 18:59:21.824290 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d440050a-59d7-4ea9-95a3-ca0b3beff640-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:21 crc kubenswrapper[4792]: I1202 18:59:21.967658 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-g5f72" event={"ID":"d440050a-59d7-4ea9-95a3-ca0b3beff640","Type":"ContainerDied","Data":"d50e667cd1fb476ff0006457797fdc2304d65cd274f272d091cf86bc9973e8e0"} Dec 02 18:59:21 crc kubenswrapper[4792]: I1202 18:59:21.967922 4792 scope.go:117] "RemoveContainer" containerID="45e1fbbb485affa2365cd04248d2f3c1cb5430f0170aeea240b30792e8dc840c" Dec 02 18:59:21 crc kubenswrapper[4792]: I1202 18:59:21.967733 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-g5f72" Dec 02 18:59:22 crc kubenswrapper[4792]: I1202 18:59:22.025268 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-g5f72"] Dec 02 18:59:22 crc kubenswrapper[4792]: I1202 18:59:22.052667 4792 scope.go:117] "RemoveContainer" containerID="14ed918c594319e33686fe22c1cfca2b59d830769068b32319c2f10325c12a23" Dec 02 18:59:22 crc kubenswrapper[4792]: I1202 18:59:22.065860 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-g5f72"] Dec 02 18:59:23 crc kubenswrapper[4792]: I1202 18:59:23.554506 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d440050a-59d7-4ea9-95a3-ca0b3beff640" path="/var/lib/kubelet/pods/d440050a-59d7-4ea9-95a3-ca0b3beff640/volumes" Dec 02 18:59:24 crc kubenswrapper[4792]: I1202 18:59:24.792006 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-79bc665747-kkc2q"] Dec 02 18:59:24 crc kubenswrapper[4792]: E1202 18:59:24.792823 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d440050a-59d7-4ea9-95a3-ca0b3beff640" containerName="init" Dec 02 18:59:24 crc kubenswrapper[4792]: I1202 18:59:24.792835 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="d440050a-59d7-4ea9-95a3-ca0b3beff640" containerName="init" Dec 02 18:59:24 crc kubenswrapper[4792]: E1202 18:59:24.792854 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d440050a-59d7-4ea9-95a3-ca0b3beff640" containerName="dnsmasq-dns" Dec 02 18:59:24 crc kubenswrapper[4792]: I1202 18:59:24.792859 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="d440050a-59d7-4ea9-95a3-ca0b3beff640" containerName="dnsmasq-dns" Dec 02 18:59:24 crc kubenswrapper[4792]: I1202 18:59:24.793041 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="d440050a-59d7-4ea9-95a3-ca0b3beff640" containerName="dnsmasq-dns" Dec 02 18:59:24 crc kubenswrapper[4792]: I1202 18:59:24.794997 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-79bc665747-kkc2q" Dec 02 18:59:24 crc kubenswrapper[4792]: I1202 18:59:24.798837 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 02 18:59:24 crc kubenswrapper[4792]: I1202 18:59:24.799029 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Dec 02 18:59:24 crc kubenswrapper[4792]: I1202 18:59:24.800148 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Dec 02 18:59:24 crc kubenswrapper[4792]: I1202 18:59:24.841171 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-79bc665747-kkc2q"] Dec 02 18:59:24 crc kubenswrapper[4792]: I1202 18:59:24.920608 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e3263958-3718-4ceb-8751-6fa73a1a60f5-internal-tls-certs\") pod \"swift-proxy-79bc665747-kkc2q\" (UID: \"e3263958-3718-4ceb-8751-6fa73a1a60f5\") " pod="openstack/swift-proxy-79bc665747-kkc2q" Dec 02 18:59:24 crc kubenswrapper[4792]: I1202 18:59:24.920674 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e3263958-3718-4ceb-8751-6fa73a1a60f5-run-httpd\") pod \"swift-proxy-79bc665747-kkc2q\" (UID: \"e3263958-3718-4ceb-8751-6fa73a1a60f5\") " pod="openstack/swift-proxy-79bc665747-kkc2q" Dec 02 18:59:24 crc kubenswrapper[4792]: I1202 18:59:24.920739 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e3263958-3718-4ceb-8751-6fa73a1a60f5-public-tls-certs\") pod \"swift-proxy-79bc665747-kkc2q\" (UID: \"e3263958-3718-4ceb-8751-6fa73a1a60f5\") " pod="openstack/swift-proxy-79bc665747-kkc2q" Dec 02 18:59:24 crc kubenswrapper[4792]: I1202 18:59:24.920757 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e3263958-3718-4ceb-8751-6fa73a1a60f5-log-httpd\") pod \"swift-proxy-79bc665747-kkc2q\" (UID: \"e3263958-3718-4ceb-8751-6fa73a1a60f5\") " pod="openstack/swift-proxy-79bc665747-kkc2q" Dec 02 18:59:24 crc kubenswrapper[4792]: I1202 18:59:24.920780 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3263958-3718-4ceb-8751-6fa73a1a60f5-combined-ca-bundle\") pod \"swift-proxy-79bc665747-kkc2q\" (UID: \"e3263958-3718-4ceb-8751-6fa73a1a60f5\") " pod="openstack/swift-proxy-79bc665747-kkc2q" Dec 02 18:59:24 crc kubenswrapper[4792]: I1202 18:59:24.920835 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kz9kd\" (UniqueName: \"kubernetes.io/projected/e3263958-3718-4ceb-8751-6fa73a1a60f5-kube-api-access-kz9kd\") pod \"swift-proxy-79bc665747-kkc2q\" (UID: \"e3263958-3718-4ceb-8751-6fa73a1a60f5\") " pod="openstack/swift-proxy-79bc665747-kkc2q" Dec 02 18:59:24 crc kubenswrapper[4792]: I1202 18:59:24.920857 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e3263958-3718-4ceb-8751-6fa73a1a60f5-etc-swift\") pod \"swift-proxy-79bc665747-kkc2q\" (UID: \"e3263958-3718-4ceb-8751-6fa73a1a60f5\") " 
pod="openstack/swift-proxy-79bc665747-kkc2q" Dec 02 18:59:24 crc kubenswrapper[4792]: I1202 18:59:24.920899 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3263958-3718-4ceb-8751-6fa73a1a60f5-config-data\") pod \"swift-proxy-79bc665747-kkc2q\" (UID: \"e3263958-3718-4ceb-8751-6fa73a1a60f5\") " pod="openstack/swift-proxy-79bc665747-kkc2q" Dec 02 18:59:25 crc kubenswrapper[4792]: I1202 18:59:25.023642 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e3263958-3718-4ceb-8751-6fa73a1a60f5-public-tls-certs\") pod \"swift-proxy-79bc665747-kkc2q\" (UID: \"e3263958-3718-4ceb-8751-6fa73a1a60f5\") " pod="openstack/swift-proxy-79bc665747-kkc2q" Dec 02 18:59:25 crc kubenswrapper[4792]: I1202 18:59:25.023687 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e3263958-3718-4ceb-8751-6fa73a1a60f5-log-httpd\") pod \"swift-proxy-79bc665747-kkc2q\" (UID: \"e3263958-3718-4ceb-8751-6fa73a1a60f5\") " pod="openstack/swift-proxy-79bc665747-kkc2q" Dec 02 18:59:25 crc kubenswrapper[4792]: I1202 18:59:25.023718 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3263958-3718-4ceb-8751-6fa73a1a60f5-combined-ca-bundle\") pod \"swift-proxy-79bc665747-kkc2q\" (UID: \"e3263958-3718-4ceb-8751-6fa73a1a60f5\") " pod="openstack/swift-proxy-79bc665747-kkc2q" Dec 02 18:59:25 crc kubenswrapper[4792]: I1202 18:59:25.023774 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kz9kd\" (UniqueName: \"kubernetes.io/projected/e3263958-3718-4ceb-8751-6fa73a1a60f5-kube-api-access-kz9kd\") pod \"swift-proxy-79bc665747-kkc2q\" (UID: \"e3263958-3718-4ceb-8751-6fa73a1a60f5\") " pod="openstack/swift-proxy-79bc665747-kkc2q" Dec 02 18:59:25 crc kubenswrapper[4792]: I1202 18:59:25.023800 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e3263958-3718-4ceb-8751-6fa73a1a60f5-etc-swift\") pod \"swift-proxy-79bc665747-kkc2q\" (UID: \"e3263958-3718-4ceb-8751-6fa73a1a60f5\") " pod="openstack/swift-proxy-79bc665747-kkc2q" Dec 02 18:59:25 crc kubenswrapper[4792]: I1202 18:59:25.023840 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3263958-3718-4ceb-8751-6fa73a1a60f5-config-data\") pod \"swift-proxy-79bc665747-kkc2q\" (UID: \"e3263958-3718-4ceb-8751-6fa73a1a60f5\") " pod="openstack/swift-proxy-79bc665747-kkc2q" Dec 02 18:59:25 crc kubenswrapper[4792]: I1202 18:59:25.023868 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e3263958-3718-4ceb-8751-6fa73a1a60f5-internal-tls-certs\") pod \"swift-proxy-79bc665747-kkc2q\" (UID: \"e3263958-3718-4ceb-8751-6fa73a1a60f5\") " pod="openstack/swift-proxy-79bc665747-kkc2q" Dec 02 18:59:25 crc kubenswrapper[4792]: I1202 18:59:25.023897 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e3263958-3718-4ceb-8751-6fa73a1a60f5-run-httpd\") pod \"swift-proxy-79bc665747-kkc2q\" (UID: \"e3263958-3718-4ceb-8751-6fa73a1a60f5\") " pod="openstack/swift-proxy-79bc665747-kkc2q" Dec 02 
18:59:25 crc kubenswrapper[4792]: I1202 18:59:25.024325 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e3263958-3718-4ceb-8751-6fa73a1a60f5-run-httpd\") pod \"swift-proxy-79bc665747-kkc2q\" (UID: \"e3263958-3718-4ceb-8751-6fa73a1a60f5\") " pod="openstack/swift-proxy-79bc665747-kkc2q" Dec 02 18:59:25 crc kubenswrapper[4792]: I1202 18:59:25.029232 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e3263958-3718-4ceb-8751-6fa73a1a60f5-log-httpd\") pod \"swift-proxy-79bc665747-kkc2q\" (UID: \"e3263958-3718-4ceb-8751-6fa73a1a60f5\") " pod="openstack/swift-proxy-79bc665747-kkc2q" Dec 02 18:59:25 crc kubenswrapper[4792]: I1202 18:59:25.035432 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e3263958-3718-4ceb-8751-6fa73a1a60f5-public-tls-certs\") pod \"swift-proxy-79bc665747-kkc2q\" (UID: \"e3263958-3718-4ceb-8751-6fa73a1a60f5\") " pod="openstack/swift-proxy-79bc665747-kkc2q" Dec 02 18:59:25 crc kubenswrapper[4792]: I1202 18:59:25.038173 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3263958-3718-4ceb-8751-6fa73a1a60f5-combined-ca-bundle\") pod \"swift-proxy-79bc665747-kkc2q\" (UID: \"e3263958-3718-4ceb-8751-6fa73a1a60f5\") " pod="openstack/swift-proxy-79bc665747-kkc2q" Dec 02 18:59:25 crc kubenswrapper[4792]: I1202 18:59:25.038350 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3263958-3718-4ceb-8751-6fa73a1a60f5-config-data\") pod \"swift-proxy-79bc665747-kkc2q\" (UID: \"e3263958-3718-4ceb-8751-6fa73a1a60f5\") " pod="openstack/swift-proxy-79bc665747-kkc2q" Dec 02 18:59:25 crc kubenswrapper[4792]: I1202 18:59:25.043663 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e3263958-3718-4ceb-8751-6fa73a1a60f5-etc-swift\") pod \"swift-proxy-79bc665747-kkc2q\" (UID: \"e3263958-3718-4ceb-8751-6fa73a1a60f5\") " pod="openstack/swift-proxy-79bc665747-kkc2q" Dec 02 18:59:25 crc kubenswrapper[4792]: I1202 18:59:25.046237 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e3263958-3718-4ceb-8751-6fa73a1a60f5-internal-tls-certs\") pod \"swift-proxy-79bc665747-kkc2q\" (UID: \"e3263958-3718-4ceb-8751-6fa73a1a60f5\") " pod="openstack/swift-proxy-79bc665747-kkc2q" Dec 02 18:59:25 crc kubenswrapper[4792]: I1202 18:59:25.059285 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kz9kd\" (UniqueName: \"kubernetes.io/projected/e3263958-3718-4ceb-8751-6fa73a1a60f5-kube-api-access-kz9kd\") pod \"swift-proxy-79bc665747-kkc2q\" (UID: \"e3263958-3718-4ceb-8751-6fa73a1a60f5\") " pod="openstack/swift-proxy-79bc665747-kkc2q" Dec 02 18:59:25 crc kubenswrapper[4792]: I1202 18:59:25.111001 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-79bc665747-kkc2q" Dec 02 18:59:25 crc kubenswrapper[4792]: I1202 18:59:25.669407 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-79bc665747-kkc2q"] Dec 02 18:59:26 crc kubenswrapper[4792]: I1202 18:59:26.011696 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-79bc665747-kkc2q" event={"ID":"e3263958-3718-4ceb-8751-6fa73a1a60f5","Type":"ContainerStarted","Data":"dfad68c88687d8e72d29e2a13a4f5e4a7fea7a76accede978d74bb7f0287e1f3"} Dec 02 18:59:26 crc kubenswrapper[4792]: I1202 18:59:26.011743 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-79bc665747-kkc2q" event={"ID":"e3263958-3718-4ceb-8751-6fa73a1a60f5","Type":"ContainerStarted","Data":"5e35d647a8756355d6ba061ec31a31e2123e5e99a6b06c68a18adbee9157506b"} Dec 02 18:59:26 crc kubenswrapper[4792]: I1202 18:59:26.306394 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 18:59:26 crc kubenswrapper[4792]: I1202 18:59:26.306691 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a165cea6-0a2d-4386-8db8-aaf30d21d213" containerName="ceilometer-central-agent" containerID="cri-o://d951f879e180a1733e6caf68b83f64d3278a977cae86369951e62abb1031895c" gracePeriod=30 Dec 02 18:59:26 crc kubenswrapper[4792]: I1202 18:59:26.306761 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a165cea6-0a2d-4386-8db8-aaf30d21d213" containerName="sg-core" containerID="cri-o://e2cd29cd81de34e9c4f059737f1aa19ffc896f08f746b414f5b36d4d29bf74f0" gracePeriod=30 Dec 02 18:59:26 crc kubenswrapper[4792]: I1202 18:59:26.306818 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a165cea6-0a2d-4386-8db8-aaf30d21d213" containerName="proxy-httpd" containerID="cri-o://0d7bf3e3cca871b6aa15ce858d7e613f1823c41d0ac60b552f6e26eae853b127" gracePeriod=30 Dec 02 18:59:26 crc kubenswrapper[4792]: I1202 18:59:26.306824 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a165cea6-0a2d-4386-8db8-aaf30d21d213" containerName="ceilometer-notification-agent" containerID="cri-o://0b13d655c1ac5b114467df92a6370247532aa915621d1370baee4276003eca17" gracePeriod=30 Dec 02 18:59:26 crc kubenswrapper[4792]: I1202 18:59:26.329588 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="a165cea6-0a2d-4386-8db8-aaf30d21d213" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 502" Dec 02 18:59:27 crc kubenswrapper[4792]: I1202 18:59:27.022854 4792 generic.go:334] "Generic (PLEG): container finished" podID="a165cea6-0a2d-4386-8db8-aaf30d21d213" containerID="0d7bf3e3cca871b6aa15ce858d7e613f1823c41d0ac60b552f6e26eae853b127" exitCode=0 Dec 02 18:59:27 crc kubenswrapper[4792]: I1202 18:59:27.022885 4792 generic.go:334] "Generic (PLEG): container finished" podID="a165cea6-0a2d-4386-8db8-aaf30d21d213" containerID="e2cd29cd81de34e9c4f059737f1aa19ffc896f08f746b414f5b36d4d29bf74f0" exitCode=2 Dec 02 18:59:27 crc kubenswrapper[4792]: I1202 18:59:27.022894 4792 generic.go:334] "Generic (PLEG): container finished" podID="a165cea6-0a2d-4386-8db8-aaf30d21d213" containerID="d951f879e180a1733e6caf68b83f64d3278a977cae86369951e62abb1031895c" exitCode=0 Dec 02 18:59:27 crc kubenswrapper[4792]: I1202 18:59:27.022926 4792 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a165cea6-0a2d-4386-8db8-aaf30d21d213","Type":"ContainerDied","Data":"0d7bf3e3cca871b6aa15ce858d7e613f1823c41d0ac60b552f6e26eae853b127"} Dec 02 18:59:27 crc kubenswrapper[4792]: I1202 18:59:27.022971 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a165cea6-0a2d-4386-8db8-aaf30d21d213","Type":"ContainerDied","Data":"e2cd29cd81de34e9c4f059737f1aa19ffc896f08f746b414f5b36d4d29bf74f0"} Dec 02 18:59:27 crc kubenswrapper[4792]: I1202 18:59:27.022983 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a165cea6-0a2d-4386-8db8-aaf30d21d213","Type":"ContainerDied","Data":"d951f879e180a1733e6caf68b83f64d3278a977cae86369951e62abb1031895c"} Dec 02 18:59:29 crc kubenswrapper[4792]: I1202 18:59:29.051694 4792 generic.go:334] "Generic (PLEG): container finished" podID="a165cea6-0a2d-4386-8db8-aaf30d21d213" containerID="0b13d655c1ac5b114467df92a6370247532aa915621d1370baee4276003eca17" exitCode=0 Dec 02 18:59:29 crc kubenswrapper[4792]: I1202 18:59:29.051776 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a165cea6-0a2d-4386-8db8-aaf30d21d213","Type":"ContainerDied","Data":"0b13d655c1ac5b114467df92a6370247532aa915621d1370baee4276003eca17"} Dec 02 18:59:29 crc kubenswrapper[4792]: I1202 18:59:29.990992 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="a165cea6-0a2d-4386-8db8-aaf30d21d213" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.182:3000/\": dial tcp 10.217.0.182:3000: connect: connection refused" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.003939 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-ccnq7"] Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.005471 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-ccnq7" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.017673 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-ccnq7"] Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.056867 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2675e4dc-601e-4d2f-9fe9-db69ca73b109-operator-scripts\") pod \"nova-api-db-create-ccnq7\" (UID: \"2675e4dc-601e-4d2f-9fe9-db69ca73b109\") " pod="openstack/nova-api-db-create-ccnq7" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.056948 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwwvx\" (UniqueName: \"kubernetes.io/projected/2675e4dc-601e-4d2f-9fe9-db69ca73b109-kube-api-access-xwwvx\") pod \"nova-api-db-create-ccnq7\" (UID: \"2675e4dc-601e-4d2f-9fe9-db69ca73b109\") " pod="openstack/nova-api-db-create-ccnq7" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.106960 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-sbnrq"] Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.108601 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-sbnrq" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.129302 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-426b-account-create-update-7chxn"] Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.130720 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-426b-account-create-update-7chxn" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.135861 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.143233 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-sbnrq"] Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.151674 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-426b-account-create-update-7chxn"] Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.160538 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8k9sl\" (UniqueName: \"kubernetes.io/projected/80838be7-b865-482d-a009-3338e3328a3d-kube-api-access-8k9sl\") pod \"nova-cell0-db-create-sbnrq\" (UID: \"80838be7-b865-482d-a009-3338e3328a3d\") " pod="openstack/nova-cell0-db-create-sbnrq" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.160602 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/80838be7-b865-482d-a009-3338e3328a3d-operator-scripts\") pod \"nova-cell0-db-create-sbnrq\" (UID: \"80838be7-b865-482d-a009-3338e3328a3d\") " pod="openstack/nova-cell0-db-create-sbnrq" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.160656 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2675e4dc-601e-4d2f-9fe9-db69ca73b109-operator-scripts\") pod \"nova-api-db-create-ccnq7\" (UID: \"2675e4dc-601e-4d2f-9fe9-db69ca73b109\") " pod="openstack/nova-api-db-create-ccnq7" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.160712 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwwvx\" (UniqueName: \"kubernetes.io/projected/2675e4dc-601e-4d2f-9fe9-db69ca73b109-kube-api-access-xwwvx\") pod \"nova-api-db-create-ccnq7\" (UID: \"2675e4dc-601e-4d2f-9fe9-db69ca73b109\") " pod="openstack/nova-api-db-create-ccnq7" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.161754 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2675e4dc-601e-4d2f-9fe9-db69ca73b109-operator-scripts\") pod \"nova-api-db-create-ccnq7\" (UID: \"2675e4dc-601e-4d2f-9fe9-db69ca73b109\") " pod="openstack/nova-api-db-create-ccnq7" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.210292 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xwwvx\" (UniqueName: \"kubernetes.io/projected/2675e4dc-601e-4d2f-9fe9-db69ca73b109-kube-api-access-xwwvx\") pod \"nova-api-db-create-ccnq7\" (UID: \"2675e4dc-601e-4d2f-9fe9-db69ca73b109\") " pod="openstack/nova-api-db-create-ccnq7" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.263864 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-lh6kq"] Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.265660 4792 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-lh6kq" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.274087 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8k9sl\" (UniqueName: \"kubernetes.io/projected/80838be7-b865-482d-a009-3338e3328a3d-kube-api-access-8k9sl\") pod \"nova-cell0-db-create-sbnrq\" (UID: \"80838be7-b865-482d-a009-3338e3328a3d\") " pod="openstack/nova-cell0-db-create-sbnrq" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.274170 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/80838be7-b865-482d-a009-3338e3328a3d-operator-scripts\") pod \"nova-cell0-db-create-sbnrq\" (UID: \"80838be7-b865-482d-a009-3338e3328a3d\") " pod="openstack/nova-cell0-db-create-sbnrq" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.274206 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmxpd\" (UniqueName: \"kubernetes.io/projected/e3c55368-83f1-4bc7-921d-44111791eb23-kube-api-access-wmxpd\") pod \"nova-api-426b-account-create-update-7chxn\" (UID: \"e3c55368-83f1-4bc7-921d-44111791eb23\") " pod="openstack/nova-api-426b-account-create-update-7chxn" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.277847 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/80838be7-b865-482d-a009-3338e3328a3d-operator-scripts\") pod \"nova-cell0-db-create-sbnrq\" (UID: \"80838be7-b865-482d-a009-3338e3328a3d\") " pod="openstack/nova-cell0-db-create-sbnrq" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.287832 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e3c55368-83f1-4bc7-921d-44111791eb23-operator-scripts\") pod \"nova-api-426b-account-create-update-7chxn\" (UID: \"e3c55368-83f1-4bc7-921d-44111791eb23\") " pod="openstack/nova-api-426b-account-create-update-7chxn" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.295657 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-lh6kq"] Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.319367 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8k9sl\" (UniqueName: \"kubernetes.io/projected/80838be7-b865-482d-a009-3338e3328a3d-kube-api-access-8k9sl\") pod \"nova-cell0-db-create-sbnrq\" (UID: \"80838be7-b865-482d-a009-3338e3328a3d\") " pod="openstack/nova-cell0-db-create-sbnrq" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.328176 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-ccnq7" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.340371 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.340607 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="093444e9-8e53-44ae-bbd3-efcb0c374905" containerName="glance-log" containerID="cri-o://86bcb694850473f45b157cab869bfab13bb5d124c31275ba77dd4e7dd0f23b8d" gracePeriod=30 Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.340725 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="093444e9-8e53-44ae-bbd3-efcb0c374905" containerName="glance-httpd" containerID="cri-o://d10a40ec1c6dd79d132d13e2508e96c3c0c660d6f43d15aa0c96c97cc90c45c3" gracePeriod=30 Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.390387 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-knpm6\" (UniqueName: \"kubernetes.io/projected/84ebdf59-d7dc-4bcd-ace8-76dadb52d06a-kube-api-access-knpm6\") pod \"nova-cell1-db-create-lh6kq\" (UID: \"84ebdf59-d7dc-4bcd-ace8-76dadb52d06a\") " pod="openstack/nova-cell1-db-create-lh6kq" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.390536 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmxpd\" (UniqueName: \"kubernetes.io/projected/e3c55368-83f1-4bc7-921d-44111791eb23-kube-api-access-wmxpd\") pod \"nova-api-426b-account-create-update-7chxn\" (UID: \"e3c55368-83f1-4bc7-921d-44111791eb23\") " pod="openstack/nova-api-426b-account-create-update-7chxn" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.390656 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e3c55368-83f1-4bc7-921d-44111791eb23-operator-scripts\") pod \"nova-api-426b-account-create-update-7chxn\" (UID: \"e3c55368-83f1-4bc7-921d-44111791eb23\") " pod="openstack/nova-api-426b-account-create-update-7chxn" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.390751 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/84ebdf59-d7dc-4bcd-ace8-76dadb52d06a-operator-scripts\") pod \"nova-cell1-db-create-lh6kq\" (UID: \"84ebdf59-d7dc-4bcd-ace8-76dadb52d06a\") " pod="openstack/nova-cell1-db-create-lh6kq" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.391904 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e3c55368-83f1-4bc7-921d-44111791eb23-operator-scripts\") pod \"nova-api-426b-account-create-update-7chxn\" (UID: \"e3c55368-83f1-4bc7-921d-44111791eb23\") " pod="openstack/nova-api-426b-account-create-update-7chxn" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.397939 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-4ebd-account-create-update-qqlvf"] Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.399299 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-4ebd-account-create-update-qqlvf" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.413780 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.425501 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-4ebd-account-create-update-qqlvf"] Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.425907 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-sbnrq" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.427871 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmxpd\" (UniqueName: \"kubernetes.io/projected/e3c55368-83f1-4bc7-921d-44111791eb23-kube-api-access-wmxpd\") pod \"nova-api-426b-account-create-update-7chxn\" (UID: \"e3c55368-83f1-4bc7-921d-44111791eb23\") " pod="openstack/nova-api-426b-account-create-update-7chxn" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.468970 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-426b-account-create-update-7chxn" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.492219 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/84ebdf59-d7dc-4bcd-ace8-76dadb52d06a-operator-scripts\") pod \"nova-cell1-db-create-lh6kq\" (UID: \"84ebdf59-d7dc-4bcd-ace8-76dadb52d06a\") " pod="openstack/nova-cell1-db-create-lh6kq" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.492278 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85b7260d-e208-454c-b9c3-2de2ff32d356-operator-scripts\") pod \"nova-cell0-4ebd-account-create-update-qqlvf\" (UID: \"85b7260d-e208-454c-b9c3-2de2ff32d356\") " pod="openstack/nova-cell0-4ebd-account-create-update-qqlvf" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.492351 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j4zl7\" (UniqueName: \"kubernetes.io/projected/85b7260d-e208-454c-b9c3-2de2ff32d356-kube-api-access-j4zl7\") pod \"nova-cell0-4ebd-account-create-update-qqlvf\" (UID: \"85b7260d-e208-454c-b9c3-2de2ff32d356\") " pod="openstack/nova-cell0-4ebd-account-create-update-qqlvf" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.492412 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-knpm6\" (UniqueName: \"kubernetes.io/projected/84ebdf59-d7dc-4bcd-ace8-76dadb52d06a-kube-api-access-knpm6\") pod \"nova-cell1-db-create-lh6kq\" (UID: \"84ebdf59-d7dc-4bcd-ace8-76dadb52d06a\") " pod="openstack/nova-cell1-db-create-lh6kq" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.493633 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/84ebdf59-d7dc-4bcd-ace8-76dadb52d06a-operator-scripts\") pod \"nova-cell1-db-create-lh6kq\" (UID: \"84ebdf59-d7dc-4bcd-ace8-76dadb52d06a\") " pod="openstack/nova-cell1-db-create-lh6kq" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.521353 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-knpm6\" (UniqueName: \"kubernetes.io/projected/84ebdf59-d7dc-4bcd-ace8-76dadb52d06a-kube-api-access-knpm6\") 
pod \"nova-cell1-db-create-lh6kq\" (UID: \"84ebdf59-d7dc-4bcd-ace8-76dadb52d06a\") " pod="openstack/nova-cell1-db-create-lh6kq" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.562583 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-lh6kq" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.579186 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-1663-account-create-update-8xcft"] Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.590896 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-1663-account-create-update-8xcft" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.597844 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.598172 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85b7260d-e208-454c-b9c3-2de2ff32d356-operator-scripts\") pod \"nova-cell0-4ebd-account-create-update-qqlvf\" (UID: \"85b7260d-e208-454c-b9c3-2de2ff32d356\") " pod="openstack/nova-cell0-4ebd-account-create-update-qqlvf" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.598289 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j4zl7\" (UniqueName: \"kubernetes.io/projected/85b7260d-e208-454c-b9c3-2de2ff32d356-kube-api-access-j4zl7\") pod \"nova-cell0-4ebd-account-create-update-qqlvf\" (UID: \"85b7260d-e208-454c-b9c3-2de2ff32d356\") " pod="openstack/nova-cell0-4ebd-account-create-update-qqlvf" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.621779 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-1663-account-create-update-8xcft"] Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.623679 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85b7260d-e208-454c-b9c3-2de2ff32d356-operator-scripts\") pod \"nova-cell0-4ebd-account-create-update-qqlvf\" (UID: \"85b7260d-e208-454c-b9c3-2de2ff32d356\") " pod="openstack/nova-cell0-4ebd-account-create-update-qqlvf" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.627427 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j4zl7\" (UniqueName: \"kubernetes.io/projected/85b7260d-e208-454c-b9c3-2de2ff32d356-kube-api-access-j4zl7\") pod \"nova-cell0-4ebd-account-create-update-qqlvf\" (UID: \"85b7260d-e208-454c-b9c3-2de2ff32d356\") " pod="openstack/nova-cell0-4ebd-account-create-update-qqlvf" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.663394 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-4ebd-account-create-update-qqlvf" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.810830 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/68e98332-53c4-4f16-85cb-608e7b01e41b-operator-scripts\") pod \"nova-cell1-1663-account-create-update-8xcft\" (UID: \"68e98332-53c4-4f16-85cb-608e7b01e41b\") " pod="openstack/nova-cell1-1663-account-create-update-8xcft" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.811090 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8tjlv\" (UniqueName: \"kubernetes.io/projected/68e98332-53c4-4f16-85cb-608e7b01e41b-kube-api-access-8tjlv\") pod \"nova-cell1-1663-account-create-update-8xcft\" (UID: \"68e98332-53c4-4f16-85cb-608e7b01e41b\") " pod="openstack/nova-cell1-1663-account-create-update-8xcft" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.915243 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8tjlv\" (UniqueName: \"kubernetes.io/projected/68e98332-53c4-4f16-85cb-608e7b01e41b-kube-api-access-8tjlv\") pod \"nova-cell1-1663-account-create-update-8xcft\" (UID: \"68e98332-53c4-4f16-85cb-608e7b01e41b\") " pod="openstack/nova-cell1-1663-account-create-update-8xcft" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.915718 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/68e98332-53c4-4f16-85cb-608e7b01e41b-operator-scripts\") pod \"nova-cell1-1663-account-create-update-8xcft\" (UID: \"68e98332-53c4-4f16-85cb-608e7b01e41b\") " pod="openstack/nova-cell1-1663-account-create-update-8xcft" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.916400 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/68e98332-53c4-4f16-85cb-608e7b01e41b-operator-scripts\") pod \"nova-cell1-1663-account-create-update-8xcft\" (UID: \"68e98332-53c4-4f16-85cb-608e7b01e41b\") " pod="openstack/nova-cell1-1663-account-create-update-8xcft" Dec 02 18:59:31 crc kubenswrapper[4792]: I1202 18:59:31.951284 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8tjlv\" (UniqueName: \"kubernetes.io/projected/68e98332-53c4-4f16-85cb-608e7b01e41b-kube-api-access-8tjlv\") pod \"nova-cell1-1663-account-create-update-8xcft\" (UID: \"68e98332-53c4-4f16-85cb-608e7b01e41b\") " pod="openstack/nova-cell1-1663-account-create-update-8xcft" Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.034149 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-1663-account-create-update-8xcft" Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.051176 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.108009 4792 generic.go:334] "Generic (PLEG): container finished" podID="093444e9-8e53-44ae-bbd3-efcb0c374905" containerID="86bcb694850473f45b157cab869bfab13bb5d124c31275ba77dd4e7dd0f23b8d" exitCode=143 Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.108180 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"093444e9-8e53-44ae-bbd3-efcb0c374905","Type":"ContainerDied","Data":"86bcb694850473f45b157cab869bfab13bb5d124c31275ba77dd4e7dd0f23b8d"} Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.110900 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-79bc665747-kkc2q" event={"ID":"e3263958-3718-4ceb-8751-6fa73a1a60f5","Type":"ContainerStarted","Data":"907ad593e05e942e80dc2b04726a7635a6b85d67763a1bdd87843e450700dd46"} Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.111095 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-79bc665747-kkc2q" Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.111441 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-79bc665747-kkc2q" Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.119130 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a165cea6-0a2d-4386-8db8-aaf30d21d213","Type":"ContainerDied","Data":"fcf826e1bb7cc75961f1c85f554a593e29b9344cc5fed1b73fec88a3a9c8b007"} Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.119175 4792 scope.go:117] "RemoveContainer" containerID="0d7bf3e3cca871b6aa15ce858d7e613f1823c41d0ac60b552f6e26eae853b127" Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.119303 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.122122 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/swift-proxy-79bc665747-kkc2q" podUID="e3263958-3718-4ceb-8751-6fa73a1a60f5" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.144238 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-79bc665747-kkc2q" podStartSLOduration=8.144223367 podStartE2EDuration="8.144223367s" podCreationTimestamp="2025-12-02 18:59:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:59:32.144112594 +0000 UTC m=+1402.917004922" watchObservedRunningTime="2025-12-02 18:59:32.144223367 +0000 UTC m=+1402.917115695" Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.175722 4792 scope.go:117] "RemoveContainer" containerID="e2cd29cd81de34e9c4f059737f1aa19ffc896f08f746b414f5b36d4d29bf74f0" Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.224171 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a165cea6-0a2d-4386-8db8-aaf30d21d213-scripts\") pod \"a165cea6-0a2d-4386-8db8-aaf30d21d213\" (UID: \"a165cea6-0a2d-4386-8db8-aaf30d21d213\") " Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.224207 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a165cea6-0a2d-4386-8db8-aaf30d21d213-sg-core-conf-yaml\") pod \"a165cea6-0a2d-4386-8db8-aaf30d21d213\" (UID: \"a165cea6-0a2d-4386-8db8-aaf30d21d213\") " Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.224238 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8sbdb\" (UniqueName: \"kubernetes.io/projected/a165cea6-0a2d-4386-8db8-aaf30d21d213-kube-api-access-8sbdb\") pod \"a165cea6-0a2d-4386-8db8-aaf30d21d213\" (UID: \"a165cea6-0a2d-4386-8db8-aaf30d21d213\") " Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.224304 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a165cea6-0a2d-4386-8db8-aaf30d21d213-config-data\") pod \"a165cea6-0a2d-4386-8db8-aaf30d21d213\" (UID: \"a165cea6-0a2d-4386-8db8-aaf30d21d213\") " Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.224329 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a165cea6-0a2d-4386-8db8-aaf30d21d213-run-httpd\") pod \"a165cea6-0a2d-4386-8db8-aaf30d21d213\" (UID: \"a165cea6-0a2d-4386-8db8-aaf30d21d213\") " Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.224384 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a165cea6-0a2d-4386-8db8-aaf30d21d213-log-httpd\") pod \"a165cea6-0a2d-4386-8db8-aaf30d21d213\" (UID: \"a165cea6-0a2d-4386-8db8-aaf30d21d213\") " Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.224474 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a165cea6-0a2d-4386-8db8-aaf30d21d213-combined-ca-bundle\") pod \"a165cea6-0a2d-4386-8db8-aaf30d21d213\" (UID: 
\"a165cea6-0a2d-4386-8db8-aaf30d21d213\") " Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.234808 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a165cea6-0a2d-4386-8db8-aaf30d21d213-scripts" (OuterVolumeSpecName: "scripts") pod "a165cea6-0a2d-4386-8db8-aaf30d21d213" (UID: "a165cea6-0a2d-4386-8db8-aaf30d21d213"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.240157 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a165cea6-0a2d-4386-8db8-aaf30d21d213-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a165cea6-0a2d-4386-8db8-aaf30d21d213" (UID: "a165cea6-0a2d-4386-8db8-aaf30d21d213"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.240437 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a165cea6-0a2d-4386-8db8-aaf30d21d213-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a165cea6-0a2d-4386-8db8-aaf30d21d213" (UID: "a165cea6-0a2d-4386-8db8-aaf30d21d213"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.253844 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a165cea6-0a2d-4386-8db8-aaf30d21d213-kube-api-access-8sbdb" (OuterVolumeSpecName: "kube-api-access-8sbdb") pod "a165cea6-0a2d-4386-8db8-aaf30d21d213" (UID: "a165cea6-0a2d-4386-8db8-aaf30d21d213"). InnerVolumeSpecName "kube-api-access-8sbdb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.270189 4792 scope.go:117] "RemoveContainer" containerID="0b13d655c1ac5b114467df92a6370247532aa915621d1370baee4276003eca17" Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.297095 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a165cea6-0a2d-4386-8db8-aaf30d21d213-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a165cea6-0a2d-4386-8db8-aaf30d21d213" (UID: "a165cea6-0a2d-4386-8db8-aaf30d21d213"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.330988 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a165cea6-0a2d-4386-8db8-aaf30d21d213-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.331019 4792 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a165cea6-0a2d-4386-8db8-aaf30d21d213-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.331030 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8sbdb\" (UniqueName: \"kubernetes.io/projected/a165cea6-0a2d-4386-8db8-aaf30d21d213-kube-api-access-8sbdb\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.331039 4792 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a165cea6-0a2d-4386-8db8-aaf30d21d213-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.331047 4792 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a165cea6-0a2d-4386-8db8-aaf30d21d213-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.407954 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a165cea6-0a2d-4386-8db8-aaf30d21d213-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a165cea6-0a2d-4386-8db8-aaf30d21d213" (UID: "a165cea6-0a2d-4386-8db8-aaf30d21d213"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.424809 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a165cea6-0a2d-4386-8db8-aaf30d21d213-config-data" (OuterVolumeSpecName: "config-data") pod "a165cea6-0a2d-4386-8db8-aaf30d21d213" (UID: "a165cea6-0a2d-4386-8db8-aaf30d21d213"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.433133 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a165cea6-0a2d-4386-8db8-aaf30d21d213-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.433197 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a165cea6-0a2d-4386-8db8-aaf30d21d213-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.493734 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-ccnq7"] Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.498010 4792 scope.go:117] "RemoveContainer" containerID="d951f879e180a1733e6caf68b83f64d3278a977cae86369951e62abb1031895c" Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.502635 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-426b-account-create-update-7chxn"] Dec 02 18:59:32 crc kubenswrapper[4792]: W1202 18:59:32.546643 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod84ebdf59_d7dc_4bcd_ace8_76dadb52d06a.slice/crio-e746141d40c2d303eee3744e6215eeb328a16461012d16c772e97e125b217fea WatchSource:0}: Error finding container e746141d40c2d303eee3744e6215eeb328a16461012d16c772e97e125b217fea: Status 404 returned error can't find the container with id e746141d40c2d303eee3744e6215eeb328a16461012d16c772e97e125b217fea Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.561615 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-lh6kq"] Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.576513 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-sbnrq"] Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.712534 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-4ebd-account-create-update-qqlvf"] Dec 02 18:59:32 crc kubenswrapper[4792]: W1202 18:59:32.713800 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod85b7260d_e208_454c_b9c3_2de2ff32d356.slice/crio-dccecc959b4276f880b3cb4dad4e270feb83150ab2ae1fe2d46a91def2b13686 WatchSource:0}: Error finding container dccecc959b4276f880b3cb4dad4e270feb83150ab2ae1fe2d46a91def2b13686: Status 404 returned error can't find the container with id dccecc959b4276f880b3cb4dad4e270feb83150ab2ae1fe2d46a91def2b13686 Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.723051 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-1663-account-create-update-8xcft"] Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.767206 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.781565 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 02 18:59:32 crc kubenswrapper[4792]: W1202 18:59:32.791109 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod68e98332_53c4_4f16_85cb_608e7b01e41b.slice/crio-49062e1df09850dc9290d755b9ffcde85f531c13679a30f0eedcb7b0167b2b60 WatchSource:0}: Error finding container 
49062e1df09850dc9290d755b9ffcde85f531c13679a30f0eedcb7b0167b2b60: Status 404 returned error can't find the container with id 49062e1df09850dc9290d755b9ffcde85f531c13679a30f0eedcb7b0167b2b60 Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.795455 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 02 18:59:32 crc kubenswrapper[4792]: E1202 18:59:32.795962 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a165cea6-0a2d-4386-8db8-aaf30d21d213" containerName="ceilometer-central-agent" Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.795979 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="a165cea6-0a2d-4386-8db8-aaf30d21d213" containerName="ceilometer-central-agent" Dec 02 18:59:32 crc kubenswrapper[4792]: E1202 18:59:32.796003 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a165cea6-0a2d-4386-8db8-aaf30d21d213" containerName="sg-core" Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.796009 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="a165cea6-0a2d-4386-8db8-aaf30d21d213" containerName="sg-core" Dec 02 18:59:32 crc kubenswrapper[4792]: E1202 18:59:32.796030 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a165cea6-0a2d-4386-8db8-aaf30d21d213" containerName="ceilometer-notification-agent" Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.796037 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="a165cea6-0a2d-4386-8db8-aaf30d21d213" containerName="ceilometer-notification-agent" Dec 02 18:59:32 crc kubenswrapper[4792]: E1202 18:59:32.796051 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a165cea6-0a2d-4386-8db8-aaf30d21d213" containerName="proxy-httpd" Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.796057 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="a165cea6-0a2d-4386-8db8-aaf30d21d213" containerName="proxy-httpd" Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.796245 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="a165cea6-0a2d-4386-8db8-aaf30d21d213" containerName="ceilometer-notification-agent" Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.796266 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="a165cea6-0a2d-4386-8db8-aaf30d21d213" containerName="ceilometer-central-agent" Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.796275 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="a165cea6-0a2d-4386-8db8-aaf30d21d213" containerName="proxy-httpd" Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.796285 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="a165cea6-0a2d-4386-8db8-aaf30d21d213" containerName="sg-core" Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.798077 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.801878 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.802008 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.819515 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.942619 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e6b9e7c9-1f70-47cd-a783-99d77de03a6b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e6b9e7c9-1f70-47cd-a783-99d77de03a6b\") " pod="openstack/ceilometer-0" Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.942668 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9zwd5\" (UniqueName: \"kubernetes.io/projected/e6b9e7c9-1f70-47cd-a783-99d77de03a6b-kube-api-access-9zwd5\") pod \"ceilometer-0\" (UID: \"e6b9e7c9-1f70-47cd-a783-99d77de03a6b\") " pod="openstack/ceilometer-0" Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.942689 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6b9e7c9-1f70-47cd-a783-99d77de03a6b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e6b9e7c9-1f70-47cd-a783-99d77de03a6b\") " pod="openstack/ceilometer-0" Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.942760 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6b9e7c9-1f70-47cd-a783-99d77de03a6b-config-data\") pod \"ceilometer-0\" (UID: \"e6b9e7c9-1f70-47cd-a783-99d77de03a6b\") " pod="openstack/ceilometer-0" Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.942779 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e6b9e7c9-1f70-47cd-a783-99d77de03a6b-run-httpd\") pod \"ceilometer-0\" (UID: \"e6b9e7c9-1f70-47cd-a783-99d77de03a6b\") " pod="openstack/ceilometer-0" Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.942948 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e6b9e7c9-1f70-47cd-a783-99d77de03a6b-log-httpd\") pod \"ceilometer-0\" (UID: \"e6b9e7c9-1f70-47cd-a783-99d77de03a6b\") " pod="openstack/ceilometer-0" Dec 02 18:59:32 crc kubenswrapper[4792]: I1202 18:59:32.943024 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e6b9e7c9-1f70-47cd-a783-99d77de03a6b-scripts\") pod \"ceilometer-0\" (UID: \"e6b9e7c9-1f70-47cd-a783-99d77de03a6b\") " pod="openstack/ceilometer-0" Dec 02 18:59:33 crc kubenswrapper[4792]: I1202 18:59:33.045230 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e6b9e7c9-1f70-47cd-a783-99d77de03a6b-log-httpd\") pod \"ceilometer-0\" (UID: \"e6b9e7c9-1f70-47cd-a783-99d77de03a6b\") " pod="openstack/ceilometer-0" Dec 02 18:59:33 crc kubenswrapper[4792]: I1202 18:59:33.045510 4792 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e6b9e7c9-1f70-47cd-a783-99d77de03a6b-scripts\") pod \"ceilometer-0\" (UID: \"e6b9e7c9-1f70-47cd-a783-99d77de03a6b\") " pod="openstack/ceilometer-0" Dec 02 18:59:33 crc kubenswrapper[4792]: I1202 18:59:33.045605 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e6b9e7c9-1f70-47cd-a783-99d77de03a6b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e6b9e7c9-1f70-47cd-a783-99d77de03a6b\") " pod="openstack/ceilometer-0" Dec 02 18:59:33 crc kubenswrapper[4792]: I1202 18:59:33.045643 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9zwd5\" (UniqueName: \"kubernetes.io/projected/e6b9e7c9-1f70-47cd-a783-99d77de03a6b-kube-api-access-9zwd5\") pod \"ceilometer-0\" (UID: \"e6b9e7c9-1f70-47cd-a783-99d77de03a6b\") " pod="openstack/ceilometer-0" Dec 02 18:59:33 crc kubenswrapper[4792]: I1202 18:59:33.045660 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6b9e7c9-1f70-47cd-a783-99d77de03a6b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e6b9e7c9-1f70-47cd-a783-99d77de03a6b\") " pod="openstack/ceilometer-0" Dec 02 18:59:33 crc kubenswrapper[4792]: I1202 18:59:33.045731 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6b9e7c9-1f70-47cd-a783-99d77de03a6b-config-data\") pod \"ceilometer-0\" (UID: \"e6b9e7c9-1f70-47cd-a783-99d77de03a6b\") " pod="openstack/ceilometer-0" Dec 02 18:59:33 crc kubenswrapper[4792]: I1202 18:59:33.045747 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e6b9e7c9-1f70-47cd-a783-99d77de03a6b-run-httpd\") pod \"ceilometer-0\" (UID: \"e6b9e7c9-1f70-47cd-a783-99d77de03a6b\") " pod="openstack/ceilometer-0" Dec 02 18:59:33 crc kubenswrapper[4792]: I1202 18:59:33.046095 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e6b9e7c9-1f70-47cd-a783-99d77de03a6b-log-httpd\") pod \"ceilometer-0\" (UID: \"e6b9e7c9-1f70-47cd-a783-99d77de03a6b\") " pod="openstack/ceilometer-0" Dec 02 18:59:33 crc kubenswrapper[4792]: I1202 18:59:33.047707 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e6b9e7c9-1f70-47cd-a783-99d77de03a6b-run-httpd\") pod \"ceilometer-0\" (UID: \"e6b9e7c9-1f70-47cd-a783-99d77de03a6b\") " pod="openstack/ceilometer-0" Dec 02 18:59:33 crc kubenswrapper[4792]: I1202 18:59:33.052658 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6b9e7c9-1f70-47cd-a783-99d77de03a6b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e6b9e7c9-1f70-47cd-a783-99d77de03a6b\") " pod="openstack/ceilometer-0" Dec 02 18:59:33 crc kubenswrapper[4792]: I1202 18:59:33.055515 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6b9e7c9-1f70-47cd-a783-99d77de03a6b-config-data\") pod \"ceilometer-0\" (UID: \"e6b9e7c9-1f70-47cd-a783-99d77de03a6b\") " pod="openstack/ceilometer-0" Dec 02 18:59:33 crc kubenswrapper[4792]: I1202 18:59:33.055951 4792 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e6b9e7c9-1f70-47cd-a783-99d77de03a6b-scripts\") pod \"ceilometer-0\" (UID: \"e6b9e7c9-1f70-47cd-a783-99d77de03a6b\") " pod="openstack/ceilometer-0" Dec 02 18:59:33 crc kubenswrapper[4792]: I1202 18:59:33.066053 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e6b9e7c9-1f70-47cd-a783-99d77de03a6b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e6b9e7c9-1f70-47cd-a783-99d77de03a6b\") " pod="openstack/ceilometer-0" Dec 02 18:59:33 crc kubenswrapper[4792]: I1202 18:59:33.081578 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9zwd5\" (UniqueName: \"kubernetes.io/projected/e6b9e7c9-1f70-47cd-a783-99d77de03a6b-kube-api-access-9zwd5\") pod \"ceilometer-0\" (UID: \"e6b9e7c9-1f70-47cd-a783-99d77de03a6b\") " pod="openstack/ceilometer-0" Dec 02 18:59:33 crc kubenswrapper[4792]: I1202 18:59:33.126235 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 18:59:33 crc kubenswrapper[4792]: I1202 18:59:33.153178 4792 generic.go:334] "Generic (PLEG): container finished" podID="2675e4dc-601e-4d2f-9fe9-db69ca73b109" containerID="274afdc421cc4391c06ee206f88586ecc3083970599609f2ee9dbf95c56f893b" exitCode=0 Dec 02 18:59:33 crc kubenswrapper[4792]: I1202 18:59:33.153243 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-ccnq7" event={"ID":"2675e4dc-601e-4d2f-9fe9-db69ca73b109","Type":"ContainerDied","Data":"274afdc421cc4391c06ee206f88586ecc3083970599609f2ee9dbf95c56f893b"} Dec 02 18:59:33 crc kubenswrapper[4792]: I1202 18:59:33.153269 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-ccnq7" event={"ID":"2675e4dc-601e-4d2f-9fe9-db69ca73b109","Type":"ContainerStarted","Data":"4f3c07a5a2be39a3fe4d931336bb96fdf5628bb383c2570a9aee2f88e10daa01"} Dec 02 18:59:33 crc kubenswrapper[4792]: I1202 18:59:33.161995 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-1663-account-create-update-8xcft" event={"ID":"68e98332-53c4-4f16-85cb-608e7b01e41b","Type":"ContainerStarted","Data":"059dea24a93777e8673f82fa81f07ee95814961213e6b30f17bbc62c0cad0ed3"} Dec 02 18:59:33 crc kubenswrapper[4792]: I1202 18:59:33.162033 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-1663-account-create-update-8xcft" event={"ID":"68e98332-53c4-4f16-85cb-608e7b01e41b","Type":"ContainerStarted","Data":"49062e1df09850dc9290d755b9ffcde85f531c13679a30f0eedcb7b0167b2b60"} Dec 02 18:59:33 crc kubenswrapper[4792]: I1202 18:59:33.175291 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-sbnrq" event={"ID":"80838be7-b865-482d-a009-3338e3328a3d","Type":"ContainerStarted","Data":"96a66e081d2b958701ad9e4cf60c5b3a4b3623f12c93def8e8e345258766272f"} Dec 02 18:59:33 crc kubenswrapper[4792]: I1202 18:59:33.175343 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-sbnrq" event={"ID":"80838be7-b865-482d-a009-3338e3328a3d","Type":"ContainerStarted","Data":"d6c5d3fbb146c605bdee89c4ef83d0581daaf979de06df41466d2ab6071c04ac"} Dec 02 18:59:33 crc kubenswrapper[4792]: I1202 18:59:33.180056 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" 
event={"ID":"5703c717-1bce-4ccc-aff7-16c5fe72e724","Type":"ContainerStarted","Data":"55e970b53badb8fc11e00ffc591f39f4e399d5e6ef6de5d54daea206172af8e1"} Dec 02 18:59:33 crc kubenswrapper[4792]: I1202 18:59:33.186454 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-426b-account-create-update-7chxn" event={"ID":"e3c55368-83f1-4bc7-921d-44111791eb23","Type":"ContainerStarted","Data":"91d316db9ad2573185af2ae8cdfc729352152d73d593db5d1cfcd17bfa3d669a"} Dec 02 18:59:33 crc kubenswrapper[4792]: I1202 18:59:33.186495 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-426b-account-create-update-7chxn" event={"ID":"e3c55368-83f1-4bc7-921d-44111791eb23","Type":"ContainerStarted","Data":"6aeaf1dd210a2a5ede9e7184fc87e584ca01802a291bcc5e18b31473aa54a4ea"} Dec 02 18:59:33 crc kubenswrapper[4792]: I1202 18:59:33.191595 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-1663-account-create-update-8xcft" podStartSLOduration=2.191575796 podStartE2EDuration="2.191575796s" podCreationTimestamp="2025-12-02 18:59:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:59:33.185305574 +0000 UTC m=+1403.958197902" watchObservedRunningTime="2025-12-02 18:59:33.191575796 +0000 UTC m=+1403.964468124" Dec 02 18:59:33 crc kubenswrapper[4792]: I1202 18:59:33.202436 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-4ebd-account-create-update-qqlvf" event={"ID":"85b7260d-e208-454c-b9c3-2de2ff32d356","Type":"ContainerStarted","Data":"dccecc959b4276f880b3cb4dad4e270feb83150ab2ae1fe2d46a91def2b13686"} Dec 02 18:59:33 crc kubenswrapper[4792]: I1202 18:59:33.205908 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-lh6kq" event={"ID":"84ebdf59-d7dc-4bcd-ace8-76dadb52d06a","Type":"ContainerStarted","Data":"613ff8dfa3ac486cbef5560a93d5b8e8fbc70b7ef735f6c4f86b5be131b12ded"} Dec 02 18:59:33 crc kubenswrapper[4792]: I1202 18:59:33.205978 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-lh6kq" event={"ID":"84ebdf59-d7dc-4bcd-ace8-76dadb52d06a","Type":"ContainerStarted","Data":"e746141d40c2d303eee3744e6215eeb328a16461012d16c772e97e125b217fea"} Dec 02 18:59:33 crc kubenswrapper[4792]: I1202 18:59:33.215455 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=3.559524989 podStartE2EDuration="15.215436285s" podCreationTimestamp="2025-12-02 18:59:18 +0000 UTC" firstStartedPulling="2025-12-02 18:59:19.932762926 +0000 UTC m=+1390.705655254" lastFinishedPulling="2025-12-02 18:59:31.588674222 +0000 UTC m=+1402.361566550" observedRunningTime="2025-12-02 18:59:33.202624203 +0000 UTC m=+1403.975516521" watchObservedRunningTime="2025-12-02 18:59:33.215436285 +0000 UTC m=+1403.988328613" Dec 02 18:59:33 crc kubenswrapper[4792]: I1202 18:59:33.219927 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/swift-proxy-79bc665747-kkc2q" podUID="e3263958-3718-4ceb-8751-6fa73a1a60f5" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 02 18:59:33 crc kubenswrapper[4792]: I1202 18:59:33.290590 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-db-create-sbnrq" podStartSLOduration=2.290571542 podStartE2EDuration="2.290571542s" podCreationTimestamp="2025-12-02 
18:59:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:59:33.214380677 +0000 UTC m=+1403.987273005" watchObservedRunningTime="2025-12-02 18:59:33.290571542 +0000 UTC m=+1404.063463870" Dec 02 18:59:33 crc kubenswrapper[4792]: I1202 18:59:33.329029 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-db-create-lh6kq" podStartSLOduration=2.329004757 podStartE2EDuration="2.329004757s" podCreationTimestamp="2025-12-02 18:59:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:59:33.236233494 +0000 UTC m=+1404.009125822" watchObservedRunningTime="2025-12-02 18:59:33.329004757 +0000 UTC m=+1404.101897085" Dec 02 18:59:33 crc kubenswrapper[4792]: I1202 18:59:33.362392 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-426b-account-create-update-7chxn" podStartSLOduration=2.362370502 podStartE2EDuration="2.362370502s" podCreationTimestamp="2025-12-02 18:59:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:59:33.269260579 +0000 UTC m=+1404.042152897" watchObservedRunningTime="2025-12-02 18:59:33.362370502 +0000 UTC m=+1404.135262820" Dec 02 18:59:33 crc kubenswrapper[4792]: I1202 18:59:33.562333 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a165cea6-0a2d-4386-8db8-aaf30d21d213" path="/var/lib/kubelet/pods/a165cea6-0a2d-4386-8db8-aaf30d21d213/volumes" Dec 02 18:59:33 crc kubenswrapper[4792]: I1202 18:59:33.788472 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 18:59:34 crc kubenswrapper[4792]: I1202 18:59:34.234645 4792 generic.go:334] "Generic (PLEG): container finished" podID="e3c55368-83f1-4bc7-921d-44111791eb23" containerID="91d316db9ad2573185af2ae8cdfc729352152d73d593db5d1cfcd17bfa3d669a" exitCode=0 Dec 02 18:59:34 crc kubenswrapper[4792]: I1202 18:59:34.234937 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-426b-account-create-update-7chxn" event={"ID":"e3c55368-83f1-4bc7-921d-44111791eb23","Type":"ContainerDied","Data":"91d316db9ad2573185af2ae8cdfc729352152d73d593db5d1cfcd17bfa3d669a"} Dec 02 18:59:34 crc kubenswrapper[4792]: I1202 18:59:34.239807 4792 generic.go:334] "Generic (PLEG): container finished" podID="68e98332-53c4-4f16-85cb-608e7b01e41b" containerID="059dea24a93777e8673f82fa81f07ee95814961213e6b30f17bbc62c0cad0ed3" exitCode=0 Dec 02 18:59:34 crc kubenswrapper[4792]: I1202 18:59:34.239870 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-1663-account-create-update-8xcft" event={"ID":"68e98332-53c4-4f16-85cb-608e7b01e41b","Type":"ContainerDied","Data":"059dea24a93777e8673f82fa81f07ee95814961213e6b30f17bbc62c0cad0ed3"} Dec 02 18:59:34 crc kubenswrapper[4792]: I1202 18:59:34.254210 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e6b9e7c9-1f70-47cd-a783-99d77de03a6b","Type":"ContainerStarted","Data":"5150587d89c412e3e08d5d3b72eb5ad59f27016995e655b62f45b341ecc680f8"} Dec 02 18:59:34 crc kubenswrapper[4792]: I1202 18:59:34.268880 4792 generic.go:334] "Generic (PLEG): container finished" podID="85b7260d-e208-454c-b9c3-2de2ff32d356" containerID="d535be81f937d6a08c822cc0ef5cceb6647f45b1d2b8a0f903453fda1bcc3b5e" 
Dec 02 18:59:34 crc kubenswrapper[4792]: I1202 18:59:34.268977 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-4ebd-account-create-update-qqlvf" event={"ID":"85b7260d-e208-454c-b9c3-2de2ff32d356","Type":"ContainerDied","Data":"d535be81f937d6a08c822cc0ef5cceb6647f45b1d2b8a0f903453fda1bcc3b5e"}
Dec 02 18:59:34 crc kubenswrapper[4792]: I1202 18:59:34.272774 4792 generic.go:334] "Generic (PLEG): container finished" podID="80838be7-b865-482d-a009-3338e3328a3d" containerID="96a66e081d2b958701ad9e4cf60c5b3a4b3623f12c93def8e8e345258766272f" exitCode=0
Dec 02 18:59:34 crc kubenswrapper[4792]: I1202 18:59:34.272832 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-sbnrq" event={"ID":"80838be7-b865-482d-a009-3338e3328a3d","Type":"ContainerDied","Data":"96a66e081d2b958701ad9e4cf60c5b3a4b3623f12c93def8e8e345258766272f"}
Dec 02 18:59:34 crc kubenswrapper[4792]: I1202 18:59:34.277274 4792 generic.go:334] "Generic (PLEG): container finished" podID="84ebdf59-d7dc-4bcd-ace8-76dadb52d06a" containerID="613ff8dfa3ac486cbef5560a93d5b8e8fbc70b7ef735f6c4f86b5be131b12ded" exitCode=0
Dec 02 18:59:34 crc kubenswrapper[4792]: I1202 18:59:34.277658 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-lh6kq" event={"ID":"84ebdf59-d7dc-4bcd-ace8-76dadb52d06a","Type":"ContainerDied","Data":"613ff8dfa3ac486cbef5560a93d5b8e8fbc70b7ef735f6c4f86b5be131b12ded"}
Dec 02 18:59:34 crc kubenswrapper[4792]: I1202 18:59:34.745701 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-ccnq7"
Dec 02 18:59:34 crc kubenswrapper[4792]: I1202 18:59:34.932043 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xwwvx\" (UniqueName: \"kubernetes.io/projected/2675e4dc-601e-4d2f-9fe9-db69ca73b109-kube-api-access-xwwvx\") pod \"2675e4dc-601e-4d2f-9fe9-db69ca73b109\" (UID: \"2675e4dc-601e-4d2f-9fe9-db69ca73b109\") "
Dec 02 18:59:34 crc kubenswrapper[4792]: I1202 18:59:34.932228 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2675e4dc-601e-4d2f-9fe9-db69ca73b109-operator-scripts\") pod \"2675e4dc-601e-4d2f-9fe9-db69ca73b109\" (UID: \"2675e4dc-601e-4d2f-9fe9-db69ca73b109\") "
Dec 02 18:59:34 crc kubenswrapper[4792]: I1202 18:59:34.932738 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2675e4dc-601e-4d2f-9fe9-db69ca73b109-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2675e4dc-601e-4d2f-9fe9-db69ca73b109" (UID: "2675e4dc-601e-4d2f-9fe9-db69ca73b109"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 18:59:34 crc kubenswrapper[4792]: I1202 18:59:34.944697 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2675e4dc-601e-4d2f-9fe9-db69ca73b109-kube-api-access-xwwvx" (OuterVolumeSpecName: "kube-api-access-xwwvx") pod "2675e4dc-601e-4d2f-9fe9-db69ca73b109" (UID: "2675e4dc-601e-4d2f-9fe9-db69ca73b109"). InnerVolumeSpecName "kube-api-access-xwwvx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 18:59:35 crc kubenswrapper[4792]: I1202 18:59:35.034947 4792 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2675e4dc-601e-4d2f-9fe9-db69ca73b109-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 02 18:59:35 crc kubenswrapper[4792]: I1202 18:59:35.034978 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xwwvx\" (UniqueName: \"kubernetes.io/projected/2675e4dc-601e-4d2f-9fe9-db69ca73b109-kube-api-access-xwwvx\") on node \"crc\" DevicePath \"\""
Dec 02 18:59:35 crc kubenswrapper[4792]: I1202 18:59:35.126030 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-79bc665747-kkc2q"
Dec 02 18:59:35 crc kubenswrapper[4792]: I1202 18:59:35.299192 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e6b9e7c9-1f70-47cd-a783-99d77de03a6b","Type":"ContainerStarted","Data":"2c5bf2758abc6698c72fa1104bf586bd08191a488adcd5fe019494ff78497d20"}
Dec 02 18:59:35 crc kubenswrapper[4792]: I1202 18:59:35.306295 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-ccnq7"
Dec 02 18:59:35 crc kubenswrapper[4792]: I1202 18:59:35.306335 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-ccnq7" event={"ID":"2675e4dc-601e-4d2f-9fe9-db69ca73b109","Type":"ContainerDied","Data":"4f3c07a5a2be39a3fe4d931336bb96fdf5628bb383c2570a9aee2f88e10daa01"}
Dec 02 18:59:35 crc kubenswrapper[4792]: I1202 18:59:35.306393 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4f3c07a5a2be39a3fe4d931336bb96fdf5628bb383c2570a9aee2f88e10daa01"
Dec 02 18:59:35 crc kubenswrapper[4792]: I1202 18:59:35.906153 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 02 18:59:35 crc kubenswrapper[4792]: I1202 18:59:35.906951 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="244d824b-faf3-4749-adeb-b3c5c13004b4" containerName="glance-log" containerID="cri-o://145bf06aca750b3bdf51332770863e24fd73368dfbe685596acbc8bf9444dc4b" gracePeriod=30
Dec 02 18:59:35 crc kubenswrapper[4792]: I1202 18:59:35.907543 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="244d824b-faf3-4749-adeb-b3c5c13004b4" containerName="glance-httpd" containerID="cri-o://a554100aefd6e5dd5c34558cc0db5e362dd200bc121265727fce77288da98872" gracePeriod=30
Dec 02 18:59:35 crc kubenswrapper[4792]: I1202 18:59:35.932815 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-lh6kq"
Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.062030 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/84ebdf59-d7dc-4bcd-ace8-76dadb52d06a-operator-scripts\") pod \"84ebdf59-d7dc-4bcd-ace8-76dadb52d06a\" (UID: \"84ebdf59-d7dc-4bcd-ace8-76dadb52d06a\") "
Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.062155 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-knpm6\" (UniqueName: \"kubernetes.io/projected/84ebdf59-d7dc-4bcd-ace8-76dadb52d06a-kube-api-access-knpm6\") pod \"84ebdf59-d7dc-4bcd-ace8-76dadb52d06a\" (UID: \"84ebdf59-d7dc-4bcd-ace8-76dadb52d06a\") "
Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.065035 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84ebdf59-d7dc-4bcd-ace8-76dadb52d06a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "84ebdf59-d7dc-4bcd-ace8-76dadb52d06a" (UID: "84ebdf59-d7dc-4bcd-ace8-76dadb52d06a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.067962 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84ebdf59-d7dc-4bcd-ace8-76dadb52d06a-kube-api-access-knpm6" (OuterVolumeSpecName: "kube-api-access-knpm6") pod "84ebdf59-d7dc-4bcd-ace8-76dadb52d06a" (UID: "84ebdf59-d7dc-4bcd-ace8-76dadb52d06a"). InnerVolumeSpecName "kube-api-access-knpm6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.108948 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-1663-account-create-update-8xcft"
Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.129805 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-4ebd-account-create-update-qqlvf"
Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.154761 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-sbnrq"
Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.164335 4792 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/84ebdf59-d7dc-4bcd-ace8-76dadb52d06a-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.164362 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-knpm6\" (UniqueName: \"kubernetes.io/projected/84ebdf59-d7dc-4bcd-ace8-76dadb52d06a-kube-api-access-knpm6\") on node \"crc\" DevicePath \"\""
Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.167868 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-426b-account-create-update-7chxn"
Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.265528 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e3c55368-83f1-4bc7-921d-44111791eb23-operator-scripts\") pod \"e3c55368-83f1-4bc7-921d-44111791eb23\" (UID: \"e3c55368-83f1-4bc7-921d-44111791eb23\") "
Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.265752 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/68e98332-53c4-4f16-85cb-608e7b01e41b-operator-scripts\") pod \"68e98332-53c4-4f16-85cb-608e7b01e41b\" (UID: \"68e98332-53c4-4f16-85cb-608e7b01e41b\") "
Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.265829 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/80838be7-b865-482d-a009-3338e3328a3d-operator-scripts\") pod \"80838be7-b865-482d-a009-3338e3328a3d\" (UID: \"80838be7-b865-482d-a009-3338e3328a3d\") "
Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.265940 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8k9sl\" (UniqueName: \"kubernetes.io/projected/80838be7-b865-482d-a009-3338e3328a3d-kube-api-access-8k9sl\") pod \"80838be7-b865-482d-a009-3338e3328a3d\" (UID: \"80838be7-b865-482d-a009-3338e3328a3d\") "
Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.265974 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85b7260d-e208-454c-b9c3-2de2ff32d356-operator-scripts\") pod \"85b7260d-e208-454c-b9c3-2de2ff32d356\" (UID: \"85b7260d-e208-454c-b9c3-2de2ff32d356\") "
Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.266017 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tjlv\" (UniqueName: \"kubernetes.io/projected/68e98332-53c4-4f16-85cb-608e7b01e41b-kube-api-access-8tjlv\") pod \"68e98332-53c4-4f16-85cb-608e7b01e41b\" (UID: \"68e98332-53c4-4f16-85cb-608e7b01e41b\") "
Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.266049 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j4zl7\" (UniqueName: \"kubernetes.io/projected/85b7260d-e208-454c-b9c3-2de2ff32d356-kube-api-access-j4zl7\") pod \"85b7260d-e208-454c-b9c3-2de2ff32d356\" (UID: \"85b7260d-e208-454c-b9c3-2de2ff32d356\") "
Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.266067 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wmxpd\" (UniqueName: \"kubernetes.io/projected/e3c55368-83f1-4bc7-921d-44111791eb23-kube-api-access-wmxpd\") pod \"e3c55368-83f1-4bc7-921d-44111791eb23\" (UID: \"e3c55368-83f1-4bc7-921d-44111791eb23\") "
Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.266548 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e3c55368-83f1-4bc7-921d-44111791eb23-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e3c55368-83f1-4bc7-921d-44111791eb23" (UID: "e3c55368-83f1-4bc7-921d-44111791eb23"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.266610 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/68e98332-53c4-4f16-85cb-608e7b01e41b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "68e98332-53c4-4f16-85cb-608e7b01e41b" (UID: "68e98332-53c4-4f16-85cb-608e7b01e41b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.266878 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/85b7260d-e208-454c-b9c3-2de2ff32d356-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "85b7260d-e208-454c-b9c3-2de2ff32d356" (UID: "85b7260d-e208-454c-b9c3-2de2ff32d356"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.267050 4792 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85b7260d-e208-454c-b9c3-2de2ff32d356-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.267071 4792 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e3c55368-83f1-4bc7-921d-44111791eb23-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.267088 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/80838be7-b865-482d-a009-3338e3328a3d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "80838be7-b865-482d-a009-3338e3328a3d" (UID: "80838be7-b865-482d-a009-3338e3328a3d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.267097 4792 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/68e98332-53c4-4f16-85cb-608e7b01e41b-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.271534 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85b7260d-e208-454c-b9c3-2de2ff32d356-kube-api-access-j4zl7" (OuterVolumeSpecName: "kube-api-access-j4zl7") pod "85b7260d-e208-454c-b9c3-2de2ff32d356" (UID: "85b7260d-e208-454c-b9c3-2de2ff32d356"). InnerVolumeSpecName "kube-api-access-j4zl7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.278428 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68e98332-53c4-4f16-85cb-608e7b01e41b-kube-api-access-8tjlv" (OuterVolumeSpecName: "kube-api-access-8tjlv") pod "68e98332-53c4-4f16-85cb-608e7b01e41b" (UID: "68e98332-53c4-4f16-85cb-608e7b01e41b"). InnerVolumeSpecName "kube-api-access-8tjlv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.285785 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3c55368-83f1-4bc7-921d-44111791eb23-kube-api-access-wmxpd" (OuterVolumeSpecName: "kube-api-access-wmxpd") pod "e3c55368-83f1-4bc7-921d-44111791eb23" (UID: "e3c55368-83f1-4bc7-921d-44111791eb23"). InnerVolumeSpecName "kube-api-access-wmxpd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.288180 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80838be7-b865-482d-a009-3338e3328a3d-kube-api-access-8k9sl" (OuterVolumeSpecName: "kube-api-access-8k9sl") pod "80838be7-b865-482d-a009-3338e3328a3d" (UID: "80838be7-b865-482d-a009-3338e3328a3d"). InnerVolumeSpecName "kube-api-access-8k9sl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.336330 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-4ebd-account-create-update-qqlvf" event={"ID":"85b7260d-e208-454c-b9c3-2de2ff32d356","Type":"ContainerDied","Data":"dccecc959b4276f880b3cb4dad4e270feb83150ab2ae1fe2d46a91def2b13686"} Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.336369 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dccecc959b4276f880b3cb4dad4e270feb83150ab2ae1fe2d46a91def2b13686" Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.336456 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-4ebd-account-create-update-qqlvf" Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.344816 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-sbnrq" event={"ID":"80838be7-b865-482d-a009-3338e3328a3d","Type":"ContainerDied","Data":"d6c5d3fbb146c605bdee89c4ef83d0581daaf979de06df41466d2ab6071c04ac"} Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.344849 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d6c5d3fbb146c605bdee89c4ef83d0581daaf979de06df41466d2ab6071c04ac" Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.344906 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-sbnrq" Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.356401 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-lh6kq" event={"ID":"84ebdf59-d7dc-4bcd-ace8-76dadb52d06a","Type":"ContainerDied","Data":"e746141d40c2d303eee3744e6215eeb328a16461012d16c772e97e125b217fea"} Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.356443 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e746141d40c2d303eee3744e6215eeb328a16461012d16c772e97e125b217fea" Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.356516 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-lh6kq" Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.365124 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e6b9e7c9-1f70-47cd-a783-99d77de03a6b","Type":"ContainerStarted","Data":"cb3e5581d1629ec5be006d82da763064d24779c5504e7d06f8e3ed2b51c7daf4"} Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.365168 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e6b9e7c9-1f70-47cd-a783-99d77de03a6b","Type":"ContainerStarted","Data":"7f40b662c374b2ba1ac05c64023ecd177f57b3fa1580eef0822bccac917d6fdf"} Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.369410 4792 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/80838be7-b865-482d-a009-3338e3328a3d-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.369432 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8k9sl\" (UniqueName: \"kubernetes.io/projected/80838be7-b865-482d-a009-3338e3328a3d-kube-api-access-8k9sl\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.369441 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tjlv\" (UniqueName: \"kubernetes.io/projected/68e98332-53c4-4f16-85cb-608e7b01e41b-kube-api-access-8tjlv\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.369473 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j4zl7\" (UniqueName: \"kubernetes.io/projected/85b7260d-e208-454c-b9c3-2de2ff32d356-kube-api-access-j4zl7\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.369482 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wmxpd\" (UniqueName: \"kubernetes.io/projected/e3c55368-83f1-4bc7-921d-44111791eb23-kube-api-access-wmxpd\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.375608 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-426b-account-create-update-7chxn" event={"ID":"e3c55368-83f1-4bc7-921d-44111791eb23","Type":"ContainerDied","Data":"6aeaf1dd210a2a5ede9e7184fc87e584ca01802a291bcc5e18b31473aa54a4ea"} Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.375638 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6aeaf1dd210a2a5ede9e7184fc87e584ca01802a291bcc5e18b31473aa54a4ea" Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.375694 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-426b-account-create-update-7chxn" Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.382071 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-1663-account-create-update-8xcft" event={"ID":"68e98332-53c4-4f16-85cb-608e7b01e41b","Type":"ContainerDied","Data":"49062e1df09850dc9290d755b9ffcde85f531c13679a30f0eedcb7b0167b2b60"} Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.382110 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="49062e1df09850dc9290d755b9ffcde85f531c13679a30f0eedcb7b0167b2b60" Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.382172 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-1663-account-create-update-8xcft" Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.393677 4792 generic.go:334] "Generic (PLEG): container finished" podID="093444e9-8e53-44ae-bbd3-efcb0c374905" containerID="d10a40ec1c6dd79d132d13e2508e96c3c0c660d6f43d15aa0c96c97cc90c45c3" exitCode=0 Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.393758 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"093444e9-8e53-44ae-bbd3-efcb0c374905","Type":"ContainerDied","Data":"d10a40ec1c6dd79d132d13e2508e96c3c0c660d6f43d15aa0c96c97cc90c45c3"} Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.395782 4792 generic.go:334] "Generic (PLEG): container finished" podID="244d824b-faf3-4749-adeb-b3c5c13004b4" containerID="145bf06aca750b3bdf51332770863e24fd73368dfbe685596acbc8bf9444dc4b" exitCode=143 Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.395802 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"244d824b-faf3-4749-adeb-b3c5c13004b4","Type":"ContainerDied","Data":"145bf06aca750b3bdf51332770863e24fd73368dfbe685596acbc8bf9444dc4b"} Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.470917 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.573614 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/093444e9-8e53-44ae-bbd3-efcb0c374905-logs\") pod \"093444e9-8e53-44ae-bbd3-efcb0c374905\" (UID: \"093444e9-8e53-44ae-bbd3-efcb0c374905\") " Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.573764 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/093444e9-8e53-44ae-bbd3-efcb0c374905-public-tls-certs\") pod \"093444e9-8e53-44ae-bbd3-efcb0c374905\" (UID: \"093444e9-8e53-44ae-bbd3-efcb0c374905\") " Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.573810 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/093444e9-8e53-44ae-bbd3-efcb0c374905-httpd-run\") pod \"093444e9-8e53-44ae-bbd3-efcb0c374905\" (UID: \"093444e9-8e53-44ae-bbd3-efcb0c374905\") " Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.573837 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/093444e9-8e53-44ae-bbd3-efcb0c374905-scripts\") pod \"093444e9-8e53-44ae-bbd3-efcb0c374905\" (UID: \"093444e9-8e53-44ae-bbd3-efcb0c374905\") " Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.573948 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8b2507f2-3d4c-435e-95ab-efbbd47db9d7\") pod \"093444e9-8e53-44ae-bbd3-efcb0c374905\" (UID: \"093444e9-8e53-44ae-bbd3-efcb0c374905\") " Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.573984 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5thf5\" (UniqueName: \"kubernetes.io/projected/093444e9-8e53-44ae-bbd3-efcb0c374905-kube-api-access-5thf5\") pod \"093444e9-8e53-44ae-bbd3-efcb0c374905\" (UID: \"093444e9-8e53-44ae-bbd3-efcb0c374905\") " Dec 02 18:59:36 crc 
kubenswrapper[4792]: I1202 18:59:36.574064 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/093444e9-8e53-44ae-bbd3-efcb0c374905-config-data\") pod \"093444e9-8e53-44ae-bbd3-efcb0c374905\" (UID: \"093444e9-8e53-44ae-bbd3-efcb0c374905\") " Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.574119 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/093444e9-8e53-44ae-bbd3-efcb0c374905-combined-ca-bundle\") pod \"093444e9-8e53-44ae-bbd3-efcb0c374905\" (UID: \"093444e9-8e53-44ae-bbd3-efcb0c374905\") " Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.574887 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/093444e9-8e53-44ae-bbd3-efcb0c374905-logs" (OuterVolumeSpecName: "logs") pod "093444e9-8e53-44ae-bbd3-efcb0c374905" (UID: "093444e9-8e53-44ae-bbd3-efcb0c374905"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.577724 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/093444e9-8e53-44ae-bbd3-efcb0c374905-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "093444e9-8e53-44ae-bbd3-efcb0c374905" (UID: "093444e9-8e53-44ae-bbd3-efcb0c374905"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.581515 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/093444e9-8e53-44ae-bbd3-efcb0c374905-kube-api-access-5thf5" (OuterVolumeSpecName: "kube-api-access-5thf5") pod "093444e9-8e53-44ae-bbd3-efcb0c374905" (UID: "093444e9-8e53-44ae-bbd3-efcb0c374905"). InnerVolumeSpecName "kube-api-access-5thf5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.583737 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/093444e9-8e53-44ae-bbd3-efcb0c374905-scripts" (OuterVolumeSpecName: "scripts") pod "093444e9-8e53-44ae-bbd3-efcb0c374905" (UID: "093444e9-8e53-44ae-bbd3-efcb0c374905"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.618611 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8b2507f2-3d4c-435e-95ab-efbbd47db9d7" (OuterVolumeSpecName: "glance") pod "093444e9-8e53-44ae-bbd3-efcb0c374905" (UID: "093444e9-8e53-44ae-bbd3-efcb0c374905"). InnerVolumeSpecName "pvc-8b2507f2-3d4c-435e-95ab-efbbd47db9d7". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.657223 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/093444e9-8e53-44ae-bbd3-efcb0c374905-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "093444e9-8e53-44ae-bbd3-efcb0c374905" (UID: "093444e9-8e53-44ae-bbd3-efcb0c374905"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.662355 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/093444e9-8e53-44ae-bbd3-efcb0c374905-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "093444e9-8e53-44ae-bbd3-efcb0c374905" (UID: "093444e9-8e53-44ae-bbd3-efcb0c374905"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.673084 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/093444e9-8e53-44ae-bbd3-efcb0c374905-config-data" (OuterVolumeSpecName: "config-data") pod "093444e9-8e53-44ae-bbd3-efcb0c374905" (UID: "093444e9-8e53-44ae-bbd3-efcb0c374905"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.676744 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/093444e9-8e53-44ae-bbd3-efcb0c374905-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.676770 4792 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/093444e9-8e53-44ae-bbd3-efcb0c374905-logs\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.676780 4792 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/093444e9-8e53-44ae-bbd3-efcb0c374905-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.676790 4792 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/093444e9-8e53-44ae-bbd3-efcb0c374905-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.676802 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/093444e9-8e53-44ae-bbd3-efcb0c374905-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.676828 4792 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-8b2507f2-3d4c-435e-95ab-efbbd47db9d7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8b2507f2-3d4c-435e-95ab-efbbd47db9d7\") on node \"crc\" " Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.676838 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5thf5\" (UniqueName: \"kubernetes.io/projected/093444e9-8e53-44ae-bbd3-efcb0c374905-kube-api-access-5thf5\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.676848 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/093444e9-8e53-44ae-bbd3-efcb0c374905-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.713781 4792 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.713941 4792 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-8b2507f2-3d4c-435e-95ab-efbbd47db9d7" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8b2507f2-3d4c-435e-95ab-efbbd47db9d7") on node "crc" Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.778072 4792 reconciler_common.go:293] "Volume detached for volume \"pvc-8b2507f2-3d4c-435e-95ab-efbbd47db9d7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8b2507f2-3d4c-435e-95ab-efbbd47db9d7\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:36 crc kubenswrapper[4792]: I1202 18:59:36.948318 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.409911 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"093444e9-8e53-44ae-bbd3-efcb0c374905","Type":"ContainerDied","Data":"60cffb2de202197fba9908b0e982444621d9e79f6feb06983d62fd520c27f97c"} Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.409965 4792 scope.go:117] "RemoveContainer" containerID="d10a40ec1c6dd79d132d13e2508e96c3c0c660d6f43d15aa0c96c97cc90c45c3" Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.410005 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.434805 4792 scope.go:117] "RemoveContainer" containerID="86bcb694850473f45b157cab869bfab13bb5d124c31275ba77dd4e7dd0f23b8d" Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.454026 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.469066 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.505908 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 18:59:37 crc kubenswrapper[4792]: E1202 18:59:37.506385 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68e98332-53c4-4f16-85cb-608e7b01e41b" containerName="mariadb-account-create-update" Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.506404 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="68e98332-53c4-4f16-85cb-608e7b01e41b" containerName="mariadb-account-create-update" Dec 02 18:59:37 crc kubenswrapper[4792]: E1202 18:59:37.506427 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85b7260d-e208-454c-b9c3-2de2ff32d356" containerName="mariadb-account-create-update" Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.506434 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="85b7260d-e208-454c-b9c3-2de2ff32d356" containerName="mariadb-account-create-update" Dec 02 18:59:37 crc kubenswrapper[4792]: E1202 18:59:37.506453 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2675e4dc-601e-4d2f-9fe9-db69ca73b109" containerName="mariadb-database-create" Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.506458 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="2675e4dc-601e-4d2f-9fe9-db69ca73b109" containerName="mariadb-database-create" Dec 02 18:59:37 crc kubenswrapper[4792]: E1202 18:59:37.506477 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="093444e9-8e53-44ae-bbd3-efcb0c374905" 
containerName="glance-httpd" Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.506483 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="093444e9-8e53-44ae-bbd3-efcb0c374905" containerName="glance-httpd" Dec 02 18:59:37 crc kubenswrapper[4792]: E1202 18:59:37.506492 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="093444e9-8e53-44ae-bbd3-efcb0c374905" containerName="glance-log" Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.506497 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="093444e9-8e53-44ae-bbd3-efcb0c374905" containerName="glance-log" Dec 02 18:59:37 crc kubenswrapper[4792]: E1202 18:59:37.506506 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84ebdf59-d7dc-4bcd-ace8-76dadb52d06a" containerName="mariadb-database-create" Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.506513 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="84ebdf59-d7dc-4bcd-ace8-76dadb52d06a" containerName="mariadb-database-create" Dec 02 18:59:37 crc kubenswrapper[4792]: E1202 18:59:37.506542 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80838be7-b865-482d-a009-3338e3328a3d" containerName="mariadb-database-create" Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.506548 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="80838be7-b865-482d-a009-3338e3328a3d" containerName="mariadb-database-create" Dec 02 18:59:37 crc kubenswrapper[4792]: E1202 18:59:37.506560 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3c55368-83f1-4bc7-921d-44111791eb23" containerName="mariadb-account-create-update" Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.506565 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3c55368-83f1-4bc7-921d-44111791eb23" containerName="mariadb-account-create-update" Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.507607 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="85b7260d-e208-454c-b9c3-2de2ff32d356" containerName="mariadb-account-create-update" Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.507637 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="093444e9-8e53-44ae-bbd3-efcb0c374905" containerName="glance-log" Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.507653 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="2675e4dc-601e-4d2f-9fe9-db69ca73b109" containerName="mariadb-database-create" Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.507664 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="80838be7-b865-482d-a009-3338e3328a3d" containerName="mariadb-database-create" Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.507673 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3c55368-83f1-4bc7-921d-44111791eb23" containerName="mariadb-account-create-update" Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.507680 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="84ebdf59-d7dc-4bcd-ace8-76dadb52d06a" containerName="mariadb-database-create" Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.507690 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="093444e9-8e53-44ae-bbd3-efcb0c374905" containerName="glance-httpd" Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.507700 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="68e98332-53c4-4f16-85cb-608e7b01e41b" containerName="mariadb-account-create-update" Dec 02 18:59:37 
Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.508759 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.513951 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc"
Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.516948 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.517056 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.560063 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="093444e9-8e53-44ae-bbd3-efcb0c374905" path="/var/lib/kubelet/pods/093444e9-8e53-44ae-bbd3-efcb0c374905/volumes"
Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.696625 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d2d8dfe-ec8c-4c51-850a-1e25165a6826-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"5d2d8dfe-ec8c-4c51-850a-1e25165a6826\") " pod="openstack/glance-default-external-api-0"
Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.696955 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5d2d8dfe-ec8c-4c51-850a-1e25165a6826-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"5d2d8dfe-ec8c-4c51-850a-1e25165a6826\") " pod="openstack/glance-default-external-api-0"
Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.697002 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5d2d8dfe-ec8c-4c51-850a-1e25165a6826-scripts\") pod \"glance-default-external-api-0\" (UID: \"5d2d8dfe-ec8c-4c51-850a-1e25165a6826\") " pod="openstack/glance-default-external-api-0"
Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.697030 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d2d8dfe-ec8c-4c51-850a-1e25165a6826-config-data\") pod \"glance-default-external-api-0\" (UID: \"5d2d8dfe-ec8c-4c51-850a-1e25165a6826\") " pod="openstack/glance-default-external-api-0"
Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.697187 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d2d8dfe-ec8c-4c51-850a-1e25165a6826-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"5d2d8dfe-ec8c-4c51-850a-1e25165a6826\") " pod="openstack/glance-default-external-api-0"
Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.697252 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-8b2507f2-3d4c-435e-95ab-efbbd47db9d7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8b2507f2-3d4c-435e-95ab-efbbd47db9d7\") pod \"glance-default-external-api-0\" (UID: \"5d2d8dfe-ec8c-4c51-850a-1e25165a6826\") " pod="openstack/glance-default-external-api-0"
Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.697290 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5d2d8dfe-ec8c-4c51-850a-1e25165a6826-logs\") pod \"glance-default-external-api-0\" (UID: \"5d2d8dfe-ec8c-4c51-850a-1e25165a6826\") " pod="openstack/glance-default-external-api-0" Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.697415 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w5k8j\" (UniqueName: \"kubernetes.io/projected/5d2d8dfe-ec8c-4c51-850a-1e25165a6826-kube-api-access-w5k8j\") pod \"glance-default-external-api-0\" (UID: \"5d2d8dfe-ec8c-4c51-850a-1e25165a6826\") " pod="openstack/glance-default-external-api-0" Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.799882 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w5k8j\" (UniqueName: \"kubernetes.io/projected/5d2d8dfe-ec8c-4c51-850a-1e25165a6826-kube-api-access-w5k8j\") pod \"glance-default-external-api-0\" (UID: \"5d2d8dfe-ec8c-4c51-850a-1e25165a6826\") " pod="openstack/glance-default-external-api-0" Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.800024 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d2d8dfe-ec8c-4c51-850a-1e25165a6826-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"5d2d8dfe-ec8c-4c51-850a-1e25165a6826\") " pod="openstack/glance-default-external-api-0" Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.800056 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5d2d8dfe-ec8c-4c51-850a-1e25165a6826-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"5d2d8dfe-ec8c-4c51-850a-1e25165a6826\") " pod="openstack/glance-default-external-api-0" Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.800107 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5d2d8dfe-ec8c-4c51-850a-1e25165a6826-scripts\") pod \"glance-default-external-api-0\" (UID: \"5d2d8dfe-ec8c-4c51-850a-1e25165a6826\") " pod="openstack/glance-default-external-api-0" Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.800131 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d2d8dfe-ec8c-4c51-850a-1e25165a6826-config-data\") pod \"glance-default-external-api-0\" (UID: \"5d2d8dfe-ec8c-4c51-850a-1e25165a6826\") " pod="openstack/glance-default-external-api-0" Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.800185 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d2d8dfe-ec8c-4c51-850a-1e25165a6826-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"5d2d8dfe-ec8c-4c51-850a-1e25165a6826\") " pod="openstack/glance-default-external-api-0" Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.800212 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-8b2507f2-3d4c-435e-95ab-efbbd47db9d7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8b2507f2-3d4c-435e-95ab-efbbd47db9d7\") pod \"glance-default-external-api-0\" (UID: \"5d2d8dfe-ec8c-4c51-850a-1e25165a6826\") " pod="openstack/glance-default-external-api-0" Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.800251 4792 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5d2d8dfe-ec8c-4c51-850a-1e25165a6826-logs\") pod \"glance-default-external-api-0\" (UID: \"5d2d8dfe-ec8c-4c51-850a-1e25165a6826\") " pod="openstack/glance-default-external-api-0" Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.800927 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5d2d8dfe-ec8c-4c51-850a-1e25165a6826-logs\") pod \"glance-default-external-api-0\" (UID: \"5d2d8dfe-ec8c-4c51-850a-1e25165a6826\") " pod="openstack/glance-default-external-api-0" Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.801225 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5d2d8dfe-ec8c-4c51-850a-1e25165a6826-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"5d2d8dfe-ec8c-4c51-850a-1e25165a6826\") " pod="openstack/glance-default-external-api-0" Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.805444 4792 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.805483 4792 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-8b2507f2-3d4c-435e-95ab-efbbd47db9d7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8b2507f2-3d4c-435e-95ab-efbbd47db9d7\") pod \"glance-default-external-api-0\" (UID: \"5d2d8dfe-ec8c-4c51-850a-1e25165a6826\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/7340b972542f32c74446fd2d8820f10387f1320bf84336609d21f0bd206378e7/globalmount\"" pod="openstack/glance-default-external-api-0" Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.805863 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d2d8dfe-ec8c-4c51-850a-1e25165a6826-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"5d2d8dfe-ec8c-4c51-850a-1e25165a6826\") " pod="openstack/glance-default-external-api-0" Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.808514 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5d2d8dfe-ec8c-4c51-850a-1e25165a6826-scripts\") pod \"glance-default-external-api-0\" (UID: \"5d2d8dfe-ec8c-4c51-850a-1e25165a6826\") " pod="openstack/glance-default-external-api-0" Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.808592 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d2d8dfe-ec8c-4c51-850a-1e25165a6826-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"5d2d8dfe-ec8c-4c51-850a-1e25165a6826\") " pod="openstack/glance-default-external-api-0" Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.809768 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d2d8dfe-ec8c-4c51-850a-1e25165a6826-config-data\") pod \"glance-default-external-api-0\" (UID: \"5d2d8dfe-ec8c-4c51-850a-1e25165a6826\") " pod="openstack/glance-default-external-api-0" Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.827301 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w5k8j\" (UniqueName: 
\"kubernetes.io/projected/5d2d8dfe-ec8c-4c51-850a-1e25165a6826-kube-api-access-w5k8j\") pod \"glance-default-external-api-0\" (UID: \"5d2d8dfe-ec8c-4c51-850a-1e25165a6826\") " pod="openstack/glance-default-external-api-0" Dec 02 18:59:37 crc kubenswrapper[4792]: I1202 18:59:37.862811 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-8b2507f2-3d4c-435e-95ab-efbbd47db9d7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8b2507f2-3d4c-435e-95ab-efbbd47db9d7\") pod \"glance-default-external-api-0\" (UID: \"5d2d8dfe-ec8c-4c51-850a-1e25165a6826\") " pod="openstack/glance-default-external-api-0" Dec 02 18:59:38 crc kubenswrapper[4792]: I1202 18:59:38.156828 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 02 18:59:38 crc kubenswrapper[4792]: I1202 18:59:38.425102 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e6b9e7c9-1f70-47cd-a783-99d77de03a6b","Type":"ContainerStarted","Data":"4fe86ac2ba0fc0060139cb72993d04c5d714627d2770c04f8b1af3b984869601"} Dec 02 18:59:38 crc kubenswrapper[4792]: I1202 18:59:38.425422 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 02 18:59:38 crc kubenswrapper[4792]: I1202 18:59:38.425412 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e6b9e7c9-1f70-47cd-a783-99d77de03a6b" containerName="ceilometer-central-agent" containerID="cri-o://2c5bf2758abc6698c72fa1104bf586bd08191a488adcd5fe019494ff78497d20" gracePeriod=30 Dec 02 18:59:38 crc kubenswrapper[4792]: I1202 18:59:38.425564 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e6b9e7c9-1f70-47cd-a783-99d77de03a6b" containerName="proxy-httpd" containerID="cri-o://4fe86ac2ba0fc0060139cb72993d04c5d714627d2770c04f8b1af3b984869601" gracePeriod=30 Dec 02 18:59:38 crc kubenswrapper[4792]: I1202 18:59:38.425607 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e6b9e7c9-1f70-47cd-a783-99d77de03a6b" containerName="sg-core" containerID="cri-o://cb3e5581d1629ec5be006d82da763064d24779c5504e7d06f8e3ed2b51c7daf4" gracePeriod=30 Dec 02 18:59:38 crc kubenswrapper[4792]: I1202 18:59:38.425649 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e6b9e7c9-1f70-47cd-a783-99d77de03a6b" containerName="ceilometer-notification-agent" containerID="cri-o://7f40b662c374b2ba1ac05c64023ecd177f57b3fa1580eef0822bccac917d6fdf" gracePeriod=30 Dec 02 18:59:38 crc kubenswrapper[4792]: I1202 18:59:38.461943 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.8288622500000002 podStartE2EDuration="6.46192652s" podCreationTimestamp="2025-12-02 18:59:32 +0000 UTC" firstStartedPulling="2025-12-02 18:59:33.778357341 +0000 UTC m=+1404.551249669" lastFinishedPulling="2025-12-02 18:59:37.411421611 +0000 UTC m=+1408.184313939" observedRunningTime="2025-12-02 18:59:38.456844499 +0000 UTC m=+1409.229736827" watchObservedRunningTime="2025-12-02 18:59:38.46192652 +0000 UTC m=+1409.234818848" Dec 02 18:59:38 crc kubenswrapper[4792]: I1202 18:59:38.728663 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 18:59:38 crc kubenswrapper[4792]: W1202 18:59:38.736386 4792 
Dec 02 18:59:38 crc kubenswrapper[4792]: W1202 18:59:38.736386 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5d2d8dfe_ec8c_4c51_850a_1e25165a6826.slice/crio-967a27f4fac939843b69feb1a52c2cc685ca9adb1b4462c615a263500e1a2cf0 WatchSource:0}: Error finding container 967a27f4fac939843b69feb1a52c2cc685ca9adb1b4462c615a263500e1a2cf0: Status 404 returned error can't find the container with id 967a27f4fac939843b69feb1a52c2cc685ca9adb1b4462c615a263500e1a2cf0
Dec 02 18:59:39 crc kubenswrapper[4792]: I1202 18:59:39.441400 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5d2d8dfe-ec8c-4c51-850a-1e25165a6826","Type":"ContainerStarted","Data":"ad4e904d301d44dede8338d6f6eedddc9c5fbb4590fa3bf0b8d255f3824a6b10"}
Dec 02 18:59:39 crc kubenswrapper[4792]: I1202 18:59:39.441980 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5d2d8dfe-ec8c-4c51-850a-1e25165a6826","Type":"ContainerStarted","Data":"967a27f4fac939843b69feb1a52c2cc685ca9adb1b4462c615a263500e1a2cf0"}
Dec 02 18:59:39 crc kubenswrapper[4792]: I1202 18:59:39.445685 4792 generic.go:334] "Generic (PLEG): container finished" podID="e6b9e7c9-1f70-47cd-a783-99d77de03a6b" containerID="4fe86ac2ba0fc0060139cb72993d04c5d714627d2770c04f8b1af3b984869601" exitCode=0
Dec 02 18:59:39 crc kubenswrapper[4792]: I1202 18:59:39.445729 4792 generic.go:334] "Generic (PLEG): container finished" podID="e6b9e7c9-1f70-47cd-a783-99d77de03a6b" containerID="cb3e5581d1629ec5be006d82da763064d24779c5504e7d06f8e3ed2b51c7daf4" exitCode=2
Dec 02 18:59:39 crc kubenswrapper[4792]: I1202 18:59:39.445748 4792 generic.go:334] "Generic (PLEG): container finished" podID="e6b9e7c9-1f70-47cd-a783-99d77de03a6b" containerID="7f40b662c374b2ba1ac05c64023ecd177f57b3fa1580eef0822bccac917d6fdf" exitCode=0
Dec 02 18:59:39 crc kubenswrapper[4792]: I1202 18:59:39.445835 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e6b9e7c9-1f70-47cd-a783-99d77de03a6b","Type":"ContainerDied","Data":"4fe86ac2ba0fc0060139cb72993d04c5d714627d2770c04f8b1af3b984869601"}
Dec 02 18:59:39 crc kubenswrapper[4792]: I1202 18:59:39.445900 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e6b9e7c9-1f70-47cd-a783-99d77de03a6b","Type":"ContainerDied","Data":"cb3e5581d1629ec5be006d82da763064d24779c5504e7d06f8e3ed2b51c7daf4"}
Dec 02 18:59:39 crc kubenswrapper[4792]: I1202 18:59:39.446012 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e6b9e7c9-1f70-47cd-a783-99d77de03a6b","Type":"ContainerDied","Data":"7f40b662c374b2ba1ac05c64023ecd177f57b3fa1580eef0822bccac917d6fdf"}
Dec 02 18:59:39 crc kubenswrapper[4792]: I1202 18:59:39.448430 4792 generic.go:334] "Generic (PLEG): container finished" podID="244d824b-faf3-4749-adeb-b3c5c13004b4" containerID="a554100aefd6e5dd5c34558cc0db5e362dd200bc121265727fce77288da98872" exitCode=0
Dec 02 18:59:39 crc kubenswrapper[4792]: I1202 18:59:39.448480 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"244d824b-faf3-4749-adeb-b3c5c13004b4","Type":"ContainerDied","Data":"a554100aefd6e5dd5c34558cc0db5e362dd200bc121265727fce77288da98872"}
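In the "container finished" entries above, exitCode reflects nothing more than how each process reacted to SIGTERM during the grace period: proxy-httpd and the ceilometer agents exit 0, while sg-core exits 2. A process that wants its graceful stop recorded as exitCode=0 has to catch the signal and return normally; a minimal Go pattern for that (illustrative, not sg-core's actual code):

package main

import (
	"context"
	"fmt"
	"os"
	"os/signal"
	"syscall"
)

func main() {
	// Cancel the context when SIGTERM arrives (the signal the runtime sends
	// first during a grace-period stop).
	ctx, stop := signal.NotifyContext(context.Background(), syscall.SIGTERM, os.Interrupt)
	defer stop()

	<-ctx.Done() // ...do real work here until asked to stop...
	fmt.Println("shutting down cleanly")
	// Returning from main exits with status 0, so the kubelet records
	// exitCode=0 instead of a nonzero "killed by signal" status.
}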
Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.070069 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.138329 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-79bc665747-kkc2q"
Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.160587 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7a6cbd4c-785c-4ada-876d-0f0fb655df45\") pod \"244d824b-faf3-4749-adeb-b3c5c13004b4\" (UID: \"244d824b-faf3-4749-adeb-b3c5c13004b4\") "
Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.160669 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/244d824b-faf3-4749-adeb-b3c5c13004b4-combined-ca-bundle\") pod \"244d824b-faf3-4749-adeb-b3c5c13004b4\" (UID: \"244d824b-faf3-4749-adeb-b3c5c13004b4\") "
Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.160802 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/244d824b-faf3-4749-adeb-b3c5c13004b4-internal-tls-certs\") pod \"244d824b-faf3-4749-adeb-b3c5c13004b4\" (UID: \"244d824b-faf3-4749-adeb-b3c5c13004b4\") "
Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.160870 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/244d824b-faf3-4749-adeb-b3c5c13004b4-httpd-run\") pod \"244d824b-faf3-4749-adeb-b3c5c13004b4\" (UID: \"244d824b-faf3-4749-adeb-b3c5c13004b4\") "
Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.160934 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dgtfw\" (UniqueName: \"kubernetes.io/projected/244d824b-faf3-4749-adeb-b3c5c13004b4-kube-api-access-dgtfw\") pod \"244d824b-faf3-4749-adeb-b3c5c13004b4\" (UID: \"244d824b-faf3-4749-adeb-b3c5c13004b4\") "
Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.160961 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/244d824b-faf3-4749-adeb-b3c5c13004b4-config-data\") pod \"244d824b-faf3-4749-adeb-b3c5c13004b4\" (UID: \"244d824b-faf3-4749-adeb-b3c5c13004b4\") "
Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.162098 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/244d824b-faf3-4749-adeb-b3c5c13004b4-logs\") pod \"244d824b-faf3-4749-adeb-b3c5c13004b4\" (UID: \"244d824b-faf3-4749-adeb-b3c5c13004b4\") "
Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.162284 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/244d824b-faf3-4749-adeb-b3c5c13004b4-scripts\") pod \"244d824b-faf3-4749-adeb-b3c5c13004b4\" (UID: \"244d824b-faf3-4749-adeb-b3c5c13004b4\") "
Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.170033 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/244d824b-faf3-4749-adeb-b3c5c13004b4-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "244d824b-faf3-4749-adeb-b3c5c13004b4" (UID: "244d824b-faf3-4749-adeb-b3c5c13004b4"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.174501 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/244d824b-faf3-4749-adeb-b3c5c13004b4-logs" (OuterVolumeSpecName: "logs") pod "244d824b-faf3-4749-adeb-b3c5c13004b4" (UID: "244d824b-faf3-4749-adeb-b3c5c13004b4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.177815 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/244d824b-faf3-4749-adeb-b3c5c13004b4-scripts" (OuterVolumeSpecName: "scripts") pod "244d824b-faf3-4749-adeb-b3c5c13004b4" (UID: "244d824b-faf3-4749-adeb-b3c5c13004b4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.179660 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/244d824b-faf3-4749-adeb-b3c5c13004b4-kube-api-access-dgtfw" (OuterVolumeSpecName: "kube-api-access-dgtfw") pod "244d824b-faf3-4749-adeb-b3c5c13004b4" (UID: "244d824b-faf3-4749-adeb-b3c5c13004b4"). InnerVolumeSpecName "kube-api-access-dgtfw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.214714 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7a6cbd4c-785c-4ada-876d-0f0fb655df45" (OuterVolumeSpecName: "glance") pod "244d824b-faf3-4749-adeb-b3c5c13004b4" (UID: "244d824b-faf3-4749-adeb-b3c5c13004b4"). InnerVolumeSpecName "pvc-7a6cbd4c-785c-4ada-876d-0f0fb655df45". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.249652 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/244d824b-faf3-4749-adeb-b3c5c13004b4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "244d824b-faf3-4749-adeb-b3c5c13004b4" (UID: "244d824b-faf3-4749-adeb-b3c5c13004b4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.267721 4792 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-7a6cbd4c-785c-4ada-876d-0f0fb655df45\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7a6cbd4c-785c-4ada-876d-0f0fb655df45\") on node \"crc\" " Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.267747 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/244d824b-faf3-4749-adeb-b3c5c13004b4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.267758 4792 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/244d824b-faf3-4749-adeb-b3c5c13004b4-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.267766 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dgtfw\" (UniqueName: \"kubernetes.io/projected/244d824b-faf3-4749-adeb-b3c5c13004b4-kube-api-access-dgtfw\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.267775 4792 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/244d824b-faf3-4749-adeb-b3c5c13004b4-logs\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.267783 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/244d824b-faf3-4749-adeb-b3c5c13004b4-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.276650 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/244d824b-faf3-4749-adeb-b3c5c13004b4-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "244d824b-faf3-4749-adeb-b3c5c13004b4" (UID: "244d824b-faf3-4749-adeb-b3c5c13004b4"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.293783 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/244d824b-faf3-4749-adeb-b3c5c13004b4-config-data" (OuterVolumeSpecName: "config-data") pod "244d824b-faf3-4749-adeb-b3c5c13004b4" (UID: "244d824b-faf3-4749-adeb-b3c5c13004b4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.296504 4792 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.296794 4792 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-7a6cbd4c-785c-4ada-876d-0f0fb655df45" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7a6cbd4c-785c-4ada-876d-0f0fb655df45") on node "crc" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.369593 4792 reconciler_common.go:293] "Volume detached for volume \"pvc-7a6cbd4c-785c-4ada-876d-0f0fb655df45\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7a6cbd4c-785c-4ada-876d-0f0fb655df45\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.369621 4792 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/244d824b-faf3-4749-adeb-b3c5c13004b4-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.369633 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/244d824b-faf3-4749-adeb-b3c5c13004b4-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.469977 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"244d824b-faf3-4749-adeb-b3c5c13004b4","Type":"ContainerDied","Data":"e5716b5f51fd8eea71c5d651c2cb8fb6e447d954d8ea1bb1e48a68571c3ed461"} Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.470054 4792 scope.go:117] "RemoveContainer" containerID="a554100aefd6e5dd5c34558cc0db5e362dd200bc121265727fce77288da98872" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.470261 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.474718 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5d2d8dfe-ec8c-4c51-850a-1e25165a6826","Type":"ContainerStarted","Data":"028bf01d27b41d213642690d3efdc859bb7b6997f0a7bbd60014e4766846b334"} Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.506443 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.506422848 podStartE2EDuration="3.506422848s" podCreationTimestamp="2025-12-02 18:59:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:59:40.496679055 +0000 UTC m=+1411.269571383" watchObservedRunningTime="2025-12-02 18:59:40.506422848 +0000 UTC m=+1411.279315176" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.509812 4792 scope.go:117] "RemoveContainer" containerID="145bf06aca750b3bdf51332770863e24fd73368dfbe685596acbc8bf9444dc4b" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.565504 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.588691 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.608817 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 18:59:40 crc kubenswrapper[4792]: E1202 18:59:40.609221 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="244d824b-faf3-4749-adeb-b3c5c13004b4" 
containerName="glance-log" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.609237 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="244d824b-faf3-4749-adeb-b3c5c13004b4" containerName="glance-log" Dec 02 18:59:40 crc kubenswrapper[4792]: E1202 18:59:40.609269 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="244d824b-faf3-4749-adeb-b3c5c13004b4" containerName="glance-httpd" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.609276 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="244d824b-faf3-4749-adeb-b3c5c13004b4" containerName="glance-httpd" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.609452 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="244d824b-faf3-4749-adeb-b3c5c13004b4" containerName="glance-log" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.609484 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="244d824b-faf3-4749-adeb-b3c5c13004b4" containerName="glance-httpd" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.610503 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.612390 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.612640 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.621546 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.781129 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c0b6301c-c44b-4f68-b11f-59e05346f689-scripts\") pod \"glance-default-internal-api-0\" (UID: \"c0b6301c-c44b-4f68-b11f-59e05346f689\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.781193 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8kzh\" (UniqueName: \"kubernetes.io/projected/c0b6301c-c44b-4f68-b11f-59e05346f689-kube-api-access-q8kzh\") pod \"glance-default-internal-api-0\" (UID: \"c0b6301c-c44b-4f68-b11f-59e05346f689\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.781219 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0b6301c-c44b-4f68-b11f-59e05346f689-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"c0b6301c-c44b-4f68-b11f-59e05346f689\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.781290 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0b6301c-c44b-4f68-b11f-59e05346f689-config-data\") pod \"glance-default-internal-api-0\" (UID: \"c0b6301c-c44b-4f68-b11f-59e05346f689\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.781312 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-7a6cbd4c-785c-4ada-876d-0f0fb655df45\" 
(UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7a6cbd4c-785c-4ada-876d-0f0fb655df45\") pod \"glance-default-internal-api-0\" (UID: \"c0b6301c-c44b-4f68-b11f-59e05346f689\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.781694 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c0b6301c-c44b-4f68-b11f-59e05346f689-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"c0b6301c-c44b-4f68-b11f-59e05346f689\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.781939 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0b6301c-c44b-4f68-b11f-59e05346f689-logs\") pod \"glance-default-internal-api-0\" (UID: \"c0b6301c-c44b-4f68-b11f-59e05346f689\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.782083 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0b6301c-c44b-4f68-b11f-59e05346f689-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"c0b6301c-c44b-4f68-b11f-59e05346f689\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.884007 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c0b6301c-c44b-4f68-b11f-59e05346f689-scripts\") pod \"glance-default-internal-api-0\" (UID: \"c0b6301c-c44b-4f68-b11f-59e05346f689\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.884080 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8kzh\" (UniqueName: \"kubernetes.io/projected/c0b6301c-c44b-4f68-b11f-59e05346f689-kube-api-access-q8kzh\") pod \"glance-default-internal-api-0\" (UID: \"c0b6301c-c44b-4f68-b11f-59e05346f689\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.884101 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0b6301c-c44b-4f68-b11f-59e05346f689-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"c0b6301c-c44b-4f68-b11f-59e05346f689\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.884157 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0b6301c-c44b-4f68-b11f-59e05346f689-config-data\") pod \"glance-default-internal-api-0\" (UID: \"c0b6301c-c44b-4f68-b11f-59e05346f689\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.884179 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-7a6cbd4c-785c-4ada-876d-0f0fb655df45\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7a6cbd4c-785c-4ada-876d-0f0fb655df45\") pod \"glance-default-internal-api-0\" (UID: \"c0b6301c-c44b-4f68-b11f-59e05346f689\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.884218 4792 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c0b6301c-c44b-4f68-b11f-59e05346f689-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"c0b6301c-c44b-4f68-b11f-59e05346f689\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.884266 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0b6301c-c44b-4f68-b11f-59e05346f689-logs\") pod \"glance-default-internal-api-0\" (UID: \"c0b6301c-c44b-4f68-b11f-59e05346f689\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.884292 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0b6301c-c44b-4f68-b11f-59e05346f689-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"c0b6301c-c44b-4f68-b11f-59e05346f689\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.885088 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c0b6301c-c44b-4f68-b11f-59e05346f689-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"c0b6301c-c44b-4f68-b11f-59e05346f689\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.885241 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0b6301c-c44b-4f68-b11f-59e05346f689-logs\") pod \"glance-default-internal-api-0\" (UID: \"c0b6301c-c44b-4f68-b11f-59e05346f689\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.888690 4792 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.888726 4792 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-7a6cbd4c-785c-4ada-876d-0f0fb655df45\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7a6cbd4c-785c-4ada-876d-0f0fb655df45\") pod \"glance-default-internal-api-0\" (UID: \"c0b6301c-c44b-4f68-b11f-59e05346f689\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/80e64c4b1f5f631bf26b32fa972d35244bcdcbb9d2d00ddd0ab5edaa6a730928/globalmount\"" pod="openstack/glance-default-internal-api-0" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.889011 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0b6301c-c44b-4f68-b11f-59e05346f689-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"c0b6301c-c44b-4f68-b11f-59e05346f689\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.889622 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0b6301c-c44b-4f68-b11f-59e05346f689-config-data\") pod \"glance-default-internal-api-0\" (UID: \"c0b6301c-c44b-4f68-b11f-59e05346f689\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.899844 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c0b6301c-c44b-4f68-b11f-59e05346f689-scripts\") pod \"glance-default-internal-api-0\" (UID: \"c0b6301c-c44b-4f68-b11f-59e05346f689\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.904816 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0b6301c-c44b-4f68-b11f-59e05346f689-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"c0b6301c-c44b-4f68-b11f-59e05346f689\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.909607 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8kzh\" (UniqueName: \"kubernetes.io/projected/c0b6301c-c44b-4f68-b11f-59e05346f689-kube-api-access-q8kzh\") pod \"glance-default-internal-api-0\" (UID: \"c0b6301c-c44b-4f68-b11f-59e05346f689\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:59:40 crc kubenswrapper[4792]: I1202 18:59:40.931111 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-7a6cbd4c-785c-4ada-876d-0f0fb655df45\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7a6cbd4c-785c-4ada-876d-0f0fb655df45\") pod \"glance-default-internal-api-0\" (UID: \"c0b6301c-c44b-4f68-b11f-59e05346f689\") " pod="openstack/glance-default-internal-api-0" Dec 02 18:59:41 crc kubenswrapper[4792]: I1202 18:59:41.229153 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 02 18:59:41 crc kubenswrapper[4792]: I1202 18:59:41.558753 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="244d824b-faf3-4749-adeb-b3c5c13004b4" path="/var/lib/kubelet/pods/244d824b-faf3-4749-adeb-b3c5c13004b4/volumes" Dec 02 18:59:41 crc kubenswrapper[4792]: I1202 18:59:41.606248 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-jh2dc"] Dec 02 18:59:41 crc kubenswrapper[4792]: I1202 18:59:41.611029 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-jh2dc" Dec 02 18:59:41 crc kubenswrapper[4792]: I1202 18:59:41.613249 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 02 18:59:41 crc kubenswrapper[4792]: I1202 18:59:41.613484 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-68v5w" Dec 02 18:59:41 crc kubenswrapper[4792]: I1202 18:59:41.613696 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Dec 02 18:59:41 crc kubenswrapper[4792]: I1202 18:59:41.627062 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-jh2dc"] Dec 02 18:59:41 crc kubenswrapper[4792]: I1202 18:59:41.700208 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b5j6l\" (UniqueName: \"kubernetes.io/projected/c750063f-56d2-47d7-9237-a7ab6a26f6a2-kube-api-access-b5j6l\") pod \"nova-cell0-conductor-db-sync-jh2dc\" (UID: \"c750063f-56d2-47d7-9237-a7ab6a26f6a2\") " pod="openstack/nova-cell0-conductor-db-sync-jh2dc" Dec 02 18:59:41 crc kubenswrapper[4792]: I1202 18:59:41.700261 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c750063f-56d2-47d7-9237-a7ab6a26f6a2-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-jh2dc\" (UID: \"c750063f-56d2-47d7-9237-a7ab6a26f6a2\") " pod="openstack/nova-cell0-conductor-db-sync-jh2dc" Dec 02 18:59:41 crc kubenswrapper[4792]: I1202 18:59:41.700466 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c750063f-56d2-47d7-9237-a7ab6a26f6a2-scripts\") pod \"nova-cell0-conductor-db-sync-jh2dc\" (UID: \"c750063f-56d2-47d7-9237-a7ab6a26f6a2\") " pod="openstack/nova-cell0-conductor-db-sync-jh2dc" Dec 02 18:59:41 crc kubenswrapper[4792]: I1202 18:59:41.700571 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c750063f-56d2-47d7-9237-a7ab6a26f6a2-config-data\") pod \"nova-cell0-conductor-db-sync-jh2dc\" (UID: \"c750063f-56d2-47d7-9237-a7ab6a26f6a2\") " pod="openstack/nova-cell0-conductor-db-sync-jh2dc" Dec 02 18:59:41 crc kubenswrapper[4792]: I1202 18:59:41.789975 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 18:59:41 crc kubenswrapper[4792]: I1202 18:59:41.802713 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c750063f-56d2-47d7-9237-a7ab6a26f6a2-scripts\") pod \"nova-cell0-conductor-db-sync-jh2dc\" (UID: \"c750063f-56d2-47d7-9237-a7ab6a26f6a2\") " 
pod="openstack/nova-cell0-conductor-db-sync-jh2dc" Dec 02 18:59:41 crc kubenswrapper[4792]: I1202 18:59:41.802784 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c750063f-56d2-47d7-9237-a7ab6a26f6a2-config-data\") pod \"nova-cell0-conductor-db-sync-jh2dc\" (UID: \"c750063f-56d2-47d7-9237-a7ab6a26f6a2\") " pod="openstack/nova-cell0-conductor-db-sync-jh2dc" Dec 02 18:59:41 crc kubenswrapper[4792]: I1202 18:59:41.802816 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b5j6l\" (UniqueName: \"kubernetes.io/projected/c750063f-56d2-47d7-9237-a7ab6a26f6a2-kube-api-access-b5j6l\") pod \"nova-cell0-conductor-db-sync-jh2dc\" (UID: \"c750063f-56d2-47d7-9237-a7ab6a26f6a2\") " pod="openstack/nova-cell0-conductor-db-sync-jh2dc" Dec 02 18:59:41 crc kubenswrapper[4792]: I1202 18:59:41.802842 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c750063f-56d2-47d7-9237-a7ab6a26f6a2-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-jh2dc\" (UID: \"c750063f-56d2-47d7-9237-a7ab6a26f6a2\") " pod="openstack/nova-cell0-conductor-db-sync-jh2dc" Dec 02 18:59:41 crc kubenswrapper[4792]: I1202 18:59:41.808966 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c750063f-56d2-47d7-9237-a7ab6a26f6a2-scripts\") pod \"nova-cell0-conductor-db-sync-jh2dc\" (UID: \"c750063f-56d2-47d7-9237-a7ab6a26f6a2\") " pod="openstack/nova-cell0-conductor-db-sync-jh2dc" Dec 02 18:59:41 crc kubenswrapper[4792]: I1202 18:59:41.810455 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c750063f-56d2-47d7-9237-a7ab6a26f6a2-config-data\") pod \"nova-cell0-conductor-db-sync-jh2dc\" (UID: \"c750063f-56d2-47d7-9237-a7ab6a26f6a2\") " pod="openstack/nova-cell0-conductor-db-sync-jh2dc" Dec 02 18:59:41 crc kubenswrapper[4792]: I1202 18:59:41.812986 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c750063f-56d2-47d7-9237-a7ab6a26f6a2-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-jh2dc\" (UID: \"c750063f-56d2-47d7-9237-a7ab6a26f6a2\") " pod="openstack/nova-cell0-conductor-db-sync-jh2dc" Dec 02 18:59:41 crc kubenswrapper[4792]: I1202 18:59:41.831024 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b5j6l\" (UniqueName: \"kubernetes.io/projected/c750063f-56d2-47d7-9237-a7ab6a26f6a2-kube-api-access-b5j6l\") pod \"nova-cell0-conductor-db-sync-jh2dc\" (UID: \"c750063f-56d2-47d7-9237-a7ab6a26f6a2\") " pod="openstack/nova-cell0-conductor-db-sync-jh2dc" Dec 02 18:59:41 crc kubenswrapper[4792]: I1202 18:59:41.932814 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-jh2dc" Dec 02 18:59:42 crc kubenswrapper[4792]: I1202 18:59:42.511553 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c0b6301c-c44b-4f68-b11f-59e05346f689","Type":"ContainerStarted","Data":"7a19be77e8e66d834a9b4ba44c6fe62dcb4fe1f24b035ec722fd9bb7ed67a035"} Dec 02 18:59:42 crc kubenswrapper[4792]: I1202 18:59:42.519032 4792 generic.go:334] "Generic (PLEG): container finished" podID="e6b9e7c9-1f70-47cd-a783-99d77de03a6b" containerID="2c5bf2758abc6698c72fa1104bf586bd08191a488adcd5fe019494ff78497d20" exitCode=0 Dec 02 18:59:42 crc kubenswrapper[4792]: I1202 18:59:42.519062 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e6b9e7c9-1f70-47cd-a783-99d77de03a6b","Type":"ContainerDied","Data":"2c5bf2758abc6698c72fa1104bf586bd08191a488adcd5fe019494ff78497d20"} Dec 02 18:59:42 crc kubenswrapper[4792]: I1202 18:59:42.519101 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e6b9e7c9-1f70-47cd-a783-99d77de03a6b","Type":"ContainerDied","Data":"5150587d89c412e3e08d5d3b72eb5ad59f27016995e655b62f45b341ecc680f8"} Dec 02 18:59:42 crc kubenswrapper[4792]: I1202 18:59:42.519111 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5150587d89c412e3e08d5d3b72eb5ad59f27016995e655b62f45b341ecc680f8" Dec 02 18:59:42 crc kubenswrapper[4792]: I1202 18:59:42.571378 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 18:59:42 crc kubenswrapper[4792]: I1202 18:59:42.673163 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-jh2dc"] Dec 02 18:59:42 crc kubenswrapper[4792]: W1202 18:59:42.677433 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc750063f_56d2_47d7_9237_a7ab6a26f6a2.slice/crio-344567230a7a8791dbd77471976cc73f32ee571b0359e7e088c4c006c207bca1 WatchSource:0}: Error finding container 344567230a7a8791dbd77471976cc73f32ee571b0359e7e088c4c006c207bca1: Status 404 returned error can't find the container with id 344567230a7a8791dbd77471976cc73f32ee571b0359e7e088c4c006c207bca1 Dec 02 18:59:42 crc kubenswrapper[4792]: I1202 18:59:42.730082 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9zwd5\" (UniqueName: \"kubernetes.io/projected/e6b9e7c9-1f70-47cd-a783-99d77de03a6b-kube-api-access-9zwd5\") pod \"e6b9e7c9-1f70-47cd-a783-99d77de03a6b\" (UID: \"e6b9e7c9-1f70-47cd-a783-99d77de03a6b\") " Dec 02 18:59:42 crc kubenswrapper[4792]: I1202 18:59:42.730265 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e6b9e7c9-1f70-47cd-a783-99d77de03a6b-run-httpd\") pod \"e6b9e7c9-1f70-47cd-a783-99d77de03a6b\" (UID: \"e6b9e7c9-1f70-47cd-a783-99d77de03a6b\") " Dec 02 18:59:42 crc kubenswrapper[4792]: I1202 18:59:42.730333 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6b9e7c9-1f70-47cd-a783-99d77de03a6b-combined-ca-bundle\") pod \"e6b9e7c9-1f70-47cd-a783-99d77de03a6b\" (UID: \"e6b9e7c9-1f70-47cd-a783-99d77de03a6b\") " Dec 02 18:59:42 crc kubenswrapper[4792]: I1202 18:59:42.730386 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e6b9e7c9-1f70-47cd-a783-99d77de03a6b-sg-core-conf-yaml\") pod \"e6b9e7c9-1f70-47cd-a783-99d77de03a6b\" (UID: \"e6b9e7c9-1f70-47cd-a783-99d77de03a6b\") " Dec 02 18:59:42 crc kubenswrapper[4792]: I1202 18:59:42.730440 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6b9e7c9-1f70-47cd-a783-99d77de03a6b-config-data\") pod \"e6b9e7c9-1f70-47cd-a783-99d77de03a6b\" (UID: \"e6b9e7c9-1f70-47cd-a783-99d77de03a6b\") " Dec 02 18:59:42 crc kubenswrapper[4792]: I1202 18:59:42.730495 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e6b9e7c9-1f70-47cd-a783-99d77de03a6b-scripts\") pod \"e6b9e7c9-1f70-47cd-a783-99d77de03a6b\" (UID: \"e6b9e7c9-1f70-47cd-a783-99d77de03a6b\") " Dec 02 18:59:42 crc kubenswrapper[4792]: I1202 18:59:42.730557 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e6b9e7c9-1f70-47cd-a783-99d77de03a6b-log-httpd\") pod \"e6b9e7c9-1f70-47cd-a783-99d77de03a6b\" (UID: \"e6b9e7c9-1f70-47cd-a783-99d77de03a6b\") " Dec 02 18:59:42 crc kubenswrapper[4792]: I1202 18:59:42.730915 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e6b9e7c9-1f70-47cd-a783-99d77de03a6b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "e6b9e7c9-1f70-47cd-a783-99d77de03a6b" (UID: "e6b9e7c9-1f70-47cd-a783-99d77de03a6b"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:59:42 crc kubenswrapper[4792]: I1202 18:59:42.731245 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e6b9e7c9-1f70-47cd-a783-99d77de03a6b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "e6b9e7c9-1f70-47cd-a783-99d77de03a6b" (UID: "e6b9e7c9-1f70-47cd-a783-99d77de03a6b"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:59:42 crc kubenswrapper[4792]: I1202 18:59:42.736624 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6b9e7c9-1f70-47cd-a783-99d77de03a6b-kube-api-access-9zwd5" (OuterVolumeSpecName: "kube-api-access-9zwd5") pod "e6b9e7c9-1f70-47cd-a783-99d77de03a6b" (UID: "e6b9e7c9-1f70-47cd-a783-99d77de03a6b"). InnerVolumeSpecName "kube-api-access-9zwd5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:59:42 crc kubenswrapper[4792]: I1202 18:59:42.738262 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6b9e7c9-1f70-47cd-a783-99d77de03a6b-scripts" (OuterVolumeSpecName: "scripts") pod "e6b9e7c9-1f70-47cd-a783-99d77de03a6b" (UID: "e6b9e7c9-1f70-47cd-a783-99d77de03a6b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:59:42 crc kubenswrapper[4792]: I1202 18:59:42.763979 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6b9e7c9-1f70-47cd-a783-99d77de03a6b-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "e6b9e7c9-1f70-47cd-a783-99d77de03a6b" (UID: "e6b9e7c9-1f70-47cd-a783-99d77de03a6b"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:59:42 crc kubenswrapper[4792]: I1202 18:59:42.831276 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6b9e7c9-1f70-47cd-a783-99d77de03a6b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e6b9e7c9-1f70-47cd-a783-99d77de03a6b" (UID: "e6b9e7c9-1f70-47cd-a783-99d77de03a6b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:59:42 crc kubenswrapper[4792]: I1202 18:59:42.832511 4792 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e6b9e7c9-1f70-47cd-a783-99d77de03a6b-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:42 crc kubenswrapper[4792]: I1202 18:59:42.833861 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6b9e7c9-1f70-47cd-a783-99d77de03a6b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:42 crc kubenswrapper[4792]: I1202 18:59:42.833886 4792 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e6b9e7c9-1f70-47cd-a783-99d77de03a6b-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:42 crc kubenswrapper[4792]: I1202 18:59:42.833897 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e6b9e7c9-1f70-47cd-a783-99d77de03a6b-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:42 crc kubenswrapper[4792]: I1202 18:59:42.833905 4792 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e6b9e7c9-1f70-47cd-a783-99d77de03a6b-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:42 crc kubenswrapper[4792]: I1202 18:59:42.833915 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9zwd5\" (UniqueName: \"kubernetes.io/projected/e6b9e7c9-1f70-47cd-a783-99d77de03a6b-kube-api-access-9zwd5\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:42 crc kubenswrapper[4792]: I1202 18:59:42.858892 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6b9e7c9-1f70-47cd-a783-99d77de03a6b-config-data" (OuterVolumeSpecName: "config-data") pod "e6b9e7c9-1f70-47cd-a783-99d77de03a6b" (UID: "e6b9e7c9-1f70-47cd-a783-99d77de03a6b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:59:42 crc kubenswrapper[4792]: I1202 18:59:42.935648 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6b9e7c9-1f70-47cd-a783-99d77de03a6b-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:43 crc kubenswrapper[4792]: I1202 18:59:43.532871 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-jh2dc" event={"ID":"c750063f-56d2-47d7-9237-a7ab6a26f6a2","Type":"ContainerStarted","Data":"344567230a7a8791dbd77471976cc73f32ee571b0359e7e088c4c006c207bca1"} Dec 02 18:59:43 crc kubenswrapper[4792]: I1202 18:59:43.537095 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c0b6301c-c44b-4f68-b11f-59e05346f689","Type":"ContainerStarted","Data":"01ad62f7c8af271511687f3e277eea3940e013ac31f183b3700076d2d5bd97c1"} Dec 02 18:59:43 crc kubenswrapper[4792]: I1202 18:59:43.537166 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 18:59:43 crc kubenswrapper[4792]: I1202 18:59:43.577680 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 18:59:43 crc kubenswrapper[4792]: I1202 18:59:43.588654 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 02 18:59:43 crc kubenswrapper[4792]: I1202 18:59:43.605217 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 02 18:59:43 crc kubenswrapper[4792]: E1202 18:59:43.605770 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6b9e7c9-1f70-47cd-a783-99d77de03a6b" containerName="ceilometer-central-agent" Dec 02 18:59:43 crc kubenswrapper[4792]: I1202 18:59:43.605786 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6b9e7c9-1f70-47cd-a783-99d77de03a6b" containerName="ceilometer-central-agent" Dec 02 18:59:43 crc kubenswrapper[4792]: E1202 18:59:43.605806 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6b9e7c9-1f70-47cd-a783-99d77de03a6b" containerName="ceilometer-notification-agent" Dec 02 18:59:43 crc kubenswrapper[4792]: I1202 18:59:43.605815 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6b9e7c9-1f70-47cd-a783-99d77de03a6b" containerName="ceilometer-notification-agent" Dec 02 18:59:43 crc kubenswrapper[4792]: E1202 18:59:43.605853 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6b9e7c9-1f70-47cd-a783-99d77de03a6b" containerName="sg-core" Dec 02 18:59:43 crc kubenswrapper[4792]: I1202 18:59:43.605862 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6b9e7c9-1f70-47cd-a783-99d77de03a6b" containerName="sg-core" Dec 02 18:59:43 crc kubenswrapper[4792]: E1202 18:59:43.605882 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6b9e7c9-1f70-47cd-a783-99d77de03a6b" containerName="proxy-httpd" Dec 02 18:59:43 crc kubenswrapper[4792]: I1202 18:59:43.605892 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6b9e7c9-1f70-47cd-a783-99d77de03a6b" containerName="proxy-httpd" Dec 02 18:59:43 crc kubenswrapper[4792]: I1202 18:59:43.606162 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6b9e7c9-1f70-47cd-a783-99d77de03a6b" containerName="proxy-httpd" Dec 02 18:59:43 crc kubenswrapper[4792]: I1202 18:59:43.606187 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6b9e7c9-1f70-47cd-a783-99d77de03a6b" containerName="sg-core" Dec 02 18:59:43 crc kubenswrapper[4792]: I1202 18:59:43.606196 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6b9e7c9-1f70-47cd-a783-99d77de03a6b" containerName="ceilometer-notification-agent" Dec 02 18:59:43 crc kubenswrapper[4792]: I1202 18:59:43.606213 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6b9e7c9-1f70-47cd-a783-99d77de03a6b" containerName="ceilometer-central-agent" Dec 02 18:59:43 crc kubenswrapper[4792]: I1202 18:59:43.609498 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 18:59:43 crc kubenswrapper[4792]: I1202 18:59:43.612167 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 02 18:59:43 crc kubenswrapper[4792]: I1202 18:59:43.612587 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 02 18:59:43 crc kubenswrapper[4792]: I1202 18:59:43.621308 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 18:59:43 crc kubenswrapper[4792]: I1202 18:59:43.647927 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6289387e-3bb7-49ef-a910-8f5277113b97-config-data\") pod \"ceilometer-0\" (UID: \"6289387e-3bb7-49ef-a910-8f5277113b97\") " pod="openstack/ceilometer-0" Dec 02 18:59:43 crc kubenswrapper[4792]: I1202 18:59:43.647973 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6289387e-3bb7-49ef-a910-8f5277113b97-scripts\") pod \"ceilometer-0\" (UID: \"6289387e-3bb7-49ef-a910-8f5277113b97\") " pod="openstack/ceilometer-0" Dec 02 18:59:43 crc kubenswrapper[4792]: I1202 18:59:43.648002 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6289387e-3bb7-49ef-a910-8f5277113b97-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6289387e-3bb7-49ef-a910-8f5277113b97\") " pod="openstack/ceilometer-0" Dec 02 18:59:43 crc kubenswrapper[4792]: I1202 18:59:43.648029 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6289387e-3bb7-49ef-a910-8f5277113b97-log-httpd\") pod \"ceilometer-0\" (UID: \"6289387e-3bb7-49ef-a910-8f5277113b97\") " pod="openstack/ceilometer-0" Dec 02 18:59:43 crc kubenswrapper[4792]: I1202 18:59:43.648050 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6289387e-3bb7-49ef-a910-8f5277113b97-run-httpd\") pod \"ceilometer-0\" (UID: \"6289387e-3bb7-49ef-a910-8f5277113b97\") " pod="openstack/ceilometer-0" Dec 02 18:59:43 crc kubenswrapper[4792]: I1202 18:59:43.648156 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6289387e-3bb7-49ef-a910-8f5277113b97-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6289387e-3bb7-49ef-a910-8f5277113b97\") " pod="openstack/ceilometer-0" Dec 02 18:59:43 crc kubenswrapper[4792]: I1202 18:59:43.648174 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hsp47\" (UniqueName: \"kubernetes.io/projected/6289387e-3bb7-49ef-a910-8f5277113b97-kube-api-access-hsp47\") pod \"ceilometer-0\" (UID: \"6289387e-3bb7-49ef-a910-8f5277113b97\") " pod="openstack/ceilometer-0" Dec 02 18:59:43 crc kubenswrapper[4792]: I1202 18:59:43.749337 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6289387e-3bb7-49ef-a910-8f5277113b97-run-httpd\") pod \"ceilometer-0\" (UID: \"6289387e-3bb7-49ef-a910-8f5277113b97\") " pod="openstack/ceilometer-0" Dec 02 18:59:43 crc kubenswrapper[4792]: I1202 18:59:43.749493 4792 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6289387e-3bb7-49ef-a910-8f5277113b97-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6289387e-3bb7-49ef-a910-8f5277113b97\") " pod="openstack/ceilometer-0" Dec 02 18:59:43 crc kubenswrapper[4792]: I1202 18:59:43.749532 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hsp47\" (UniqueName: \"kubernetes.io/projected/6289387e-3bb7-49ef-a910-8f5277113b97-kube-api-access-hsp47\") pod \"ceilometer-0\" (UID: \"6289387e-3bb7-49ef-a910-8f5277113b97\") " pod="openstack/ceilometer-0" Dec 02 18:59:43 crc kubenswrapper[4792]: I1202 18:59:43.749567 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6289387e-3bb7-49ef-a910-8f5277113b97-config-data\") pod \"ceilometer-0\" (UID: \"6289387e-3bb7-49ef-a910-8f5277113b97\") " pod="openstack/ceilometer-0" Dec 02 18:59:43 crc kubenswrapper[4792]: I1202 18:59:43.749592 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6289387e-3bb7-49ef-a910-8f5277113b97-scripts\") pod \"ceilometer-0\" (UID: \"6289387e-3bb7-49ef-a910-8f5277113b97\") " pod="openstack/ceilometer-0" Dec 02 18:59:43 crc kubenswrapper[4792]: I1202 18:59:43.749621 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6289387e-3bb7-49ef-a910-8f5277113b97-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6289387e-3bb7-49ef-a910-8f5277113b97\") " pod="openstack/ceilometer-0" Dec 02 18:59:43 crc kubenswrapper[4792]: I1202 18:59:43.749648 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6289387e-3bb7-49ef-a910-8f5277113b97-log-httpd\") pod \"ceilometer-0\" (UID: \"6289387e-3bb7-49ef-a910-8f5277113b97\") " pod="openstack/ceilometer-0" Dec 02 18:59:43 crc kubenswrapper[4792]: I1202 18:59:43.750036 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6289387e-3bb7-49ef-a910-8f5277113b97-run-httpd\") pod \"ceilometer-0\" (UID: \"6289387e-3bb7-49ef-a910-8f5277113b97\") " pod="openstack/ceilometer-0" Dec 02 18:59:43 crc kubenswrapper[4792]: I1202 18:59:43.750401 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6289387e-3bb7-49ef-a910-8f5277113b97-log-httpd\") pod \"ceilometer-0\" (UID: \"6289387e-3bb7-49ef-a910-8f5277113b97\") " pod="openstack/ceilometer-0" Dec 02 18:59:43 crc kubenswrapper[4792]: I1202 18:59:43.753950 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6289387e-3bb7-49ef-a910-8f5277113b97-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6289387e-3bb7-49ef-a910-8f5277113b97\") " pod="openstack/ceilometer-0" Dec 02 18:59:43 crc kubenswrapper[4792]: I1202 18:59:43.754009 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6289387e-3bb7-49ef-a910-8f5277113b97-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6289387e-3bb7-49ef-a910-8f5277113b97\") " pod="openstack/ceilometer-0" Dec 02 18:59:43 crc kubenswrapper[4792]: I1202 18:59:43.754188 4792 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6289387e-3bb7-49ef-a910-8f5277113b97-scripts\") pod \"ceilometer-0\" (UID: \"6289387e-3bb7-49ef-a910-8f5277113b97\") " pod="openstack/ceilometer-0" Dec 02 18:59:43 crc kubenswrapper[4792]: I1202 18:59:43.756189 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6289387e-3bb7-49ef-a910-8f5277113b97-config-data\") pod \"ceilometer-0\" (UID: \"6289387e-3bb7-49ef-a910-8f5277113b97\") " pod="openstack/ceilometer-0" Dec 02 18:59:43 crc kubenswrapper[4792]: I1202 18:59:43.765248 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hsp47\" (UniqueName: \"kubernetes.io/projected/6289387e-3bb7-49ef-a910-8f5277113b97-kube-api-access-hsp47\") pod \"ceilometer-0\" (UID: \"6289387e-3bb7-49ef-a910-8f5277113b97\") " pod="openstack/ceilometer-0" Dec 02 18:59:43 crc kubenswrapper[4792]: I1202 18:59:43.913954 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 18:59:43 crc kubenswrapper[4792]: I1202 18:59:43.915078 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 18:59:44 crc kubenswrapper[4792]: I1202 18:59:44.391718 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 18:59:44 crc kubenswrapper[4792]: I1202 18:59:44.567632 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6289387e-3bb7-49ef-a910-8f5277113b97","Type":"ContainerStarted","Data":"d80703ec42ec5f0b9dcb47a4202ff740ac1ef09bc2a3c1d6afd8969fdc07da35"} Dec 02 18:59:44 crc kubenswrapper[4792]: I1202 18:59:44.570859 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c0b6301c-c44b-4f68-b11f-59e05346f689","Type":"ContainerStarted","Data":"3817783f55b63165e55b60c2f669efd5478c457eb8031371ca70efe33c442d26"} Dec 02 18:59:44 crc kubenswrapper[4792]: I1202 18:59:44.598759 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.598740417 podStartE2EDuration="4.598740417s" podCreationTimestamp="2025-12-02 18:59:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:59:44.598189883 +0000 UTC m=+1415.371082231" watchObservedRunningTime="2025-12-02 18:59:44.598740417 +0000 UTC m=+1415.371632745" Dec 02 18:59:45 crc kubenswrapper[4792]: I1202 18:59:45.552809 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e6b9e7c9-1f70-47cd-a783-99d77de03a6b" path="/var/lib/kubelet/pods/e6b9e7c9-1f70-47cd-a783-99d77de03a6b/volumes" Dec 02 18:59:45 crc kubenswrapper[4792]: I1202 18:59:45.584148 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6289387e-3bb7-49ef-a910-8f5277113b97","Type":"ContainerStarted","Data":"235ddcb5467cde2032c32523cecb58c63cc6c0fa70a16daed83ea4135426275a"} Dec 02 18:59:46 crc kubenswrapper[4792]: I1202 18:59:46.594457 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6289387e-3bb7-49ef-a910-8f5277113b97","Type":"ContainerStarted","Data":"9a79a68507a5a8cdc89cfe92afa51e9aa9718bc6b2a86daac2d4281e104c4964"} Dec 02 18:59:46 crc kubenswrapper[4792]: I1202 18:59:46.596120 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ceilometer-0" event={"ID":"6289387e-3bb7-49ef-a910-8f5277113b97","Type":"ContainerStarted","Data":"c95bcc8843b0181781f0ac4a9e58461f53b55eae8de8ac0e60563d5852c08d5f"} Dec 02 18:59:48 crc kubenswrapper[4792]: I1202 18:59:48.157960 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 02 18:59:48 crc kubenswrapper[4792]: I1202 18:59:48.158874 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 02 18:59:48 crc kubenswrapper[4792]: I1202 18:59:48.227001 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 02 18:59:48 crc kubenswrapper[4792]: I1202 18:59:48.236503 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 02 18:59:48 crc kubenswrapper[4792]: I1202 18:59:48.629420 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 02 18:59:48 crc kubenswrapper[4792]: I1202 18:59:48.629739 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 02 18:59:51 crc kubenswrapper[4792]: I1202 18:59:51.229416 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 02 18:59:51 crc kubenswrapper[4792]: I1202 18:59:51.230031 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 02 18:59:51 crc kubenswrapper[4792]: I1202 18:59:51.270803 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 02 18:59:51 crc kubenswrapper[4792]: I1202 18:59:51.287780 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 02 18:59:51 crc kubenswrapper[4792]: I1202 18:59:51.660722 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 02 18:59:51 crc kubenswrapper[4792]: I1202 18:59:51.660764 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 02 18:59:52 crc kubenswrapper[4792]: I1202 18:59:52.092529 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 02 18:59:52 crc kubenswrapper[4792]: I1202 18:59:52.092613 4792 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 02 18:59:52 crc kubenswrapper[4792]: I1202 18:59:52.111070 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 02 18:59:53 crc kubenswrapper[4792]: I1202 18:59:53.681193 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-jh2dc" event={"ID":"c750063f-56d2-47d7-9237-a7ab6a26f6a2","Type":"ContainerStarted","Data":"96c8a20700cfe76e4fea8b6e0ea62097ccb2d41a1004ddb04a056d9f913ee1a7"} Dec 02 18:59:53 crc kubenswrapper[4792]: I1202 18:59:53.685510 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6289387e-3bb7-49ef-a910-8f5277113b97","Type":"ContainerStarted","Data":"556f226c2e07c69c564c33a90bfecbdda9283e9c52f8a2815253aa190bf5719f"} Dec 02 18:59:53 crc kubenswrapper[4792]: I1202 
18:59:53.685689 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Dec 02 18:59:53 crc kubenswrapper[4792]: I1202 18:59:53.685664 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6289387e-3bb7-49ef-a910-8f5277113b97" containerName="ceilometer-central-agent" containerID="cri-o://235ddcb5467cde2032c32523cecb58c63cc6c0fa70a16daed83ea4135426275a" gracePeriod=30
Dec 02 18:59:53 crc kubenswrapper[4792]: I1202 18:59:53.685778 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6289387e-3bb7-49ef-a910-8f5277113b97" containerName="proxy-httpd" containerID="cri-o://556f226c2e07c69c564c33a90bfecbdda9283e9c52f8a2815253aa190bf5719f" gracePeriod=30
Dec 02 18:59:53 crc kubenswrapper[4792]: I1202 18:59:53.685772 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6289387e-3bb7-49ef-a910-8f5277113b97" containerName="sg-core" containerID="cri-o://9a79a68507a5a8cdc89cfe92afa51e9aa9718bc6b2a86daac2d4281e104c4964" gracePeriod=30
Dec 02 18:59:53 crc kubenswrapper[4792]: I1202 18:59:53.685812 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6289387e-3bb7-49ef-a910-8f5277113b97" containerName="ceilometer-notification-agent" containerID="cri-o://c95bcc8843b0181781f0ac4a9e58461f53b55eae8de8ac0e60563d5852c08d5f" gracePeriod=30
Dec 02 18:59:53 crc kubenswrapper[4792]: I1202 18:59:53.706212 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-jh2dc" podStartSLOduration=2.154067592 podStartE2EDuration="12.706191327s" podCreationTimestamp="2025-12-02 18:59:41 +0000 UTC" firstStartedPulling="2025-12-02 18:59:42.679772323 +0000 UTC m=+1413.452664651" lastFinishedPulling="2025-12-02 18:59:53.231896058 +0000 UTC m=+1424.004788386" observedRunningTime="2025-12-02 18:59:53.700152501 +0000 UTC m=+1424.473044829" watchObservedRunningTime="2025-12-02 18:59:53.706191327 +0000 UTC m=+1424.479083655"
Dec 02 18:59:53 crc kubenswrapper[4792]: I1202 18:59:53.732483 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.9096180230000002 podStartE2EDuration="10.732459788s" podCreationTimestamp="2025-12-02 18:59:43 +0000 UTC" firstStartedPulling="2025-12-02 18:59:44.403408946 +0000 UTC m=+1415.176301274" lastFinishedPulling="2025-12-02 18:59:53.226250721 +0000 UTC m=+1423.999143039" observedRunningTime="2025-12-02 18:59:53.72365573 +0000 UTC m=+1424.496548058" watchObservedRunningTime="2025-12-02 18:59:53.732459788 +0000 UTC m=+1424.505352116"
Dec 02 18:59:53 crc kubenswrapper[4792]: I1202 18:59:53.769461 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-api-0"
Dec 02 18:59:53 crc kubenswrapper[4792]: I1202 18:59:53.820845 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Dec 02 18:59:53 crc kubenswrapper[4792]: I1202 18:59:53.820926 4792 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 02 18:59:53 crc kubenswrapper[4792]: I1202 18:59:53.826137 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
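
The two "Observed pod startup duration" entries above decompose cleanly: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration is that figure minus the time spent pulling images (lastFinishedPulling minus firstStartedPulling). A small check against the nova-cell0-conductor-db-sync-jh2dc entry, with its timestamps copied verbatim; the decomposition is inferred from these numbers rather than quoted from kubelet documentation.

package main

import (
	"fmt"
	"time"
)

func main() {
	parse := func(s string) time.Time {
		t, err := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST", s)
		if err != nil {
			panic(err)
		}
		return t
	}
	// Timestamps from the nova-cell0-conductor-db-sync-jh2dc entry above.
	created := parse("2025-12-02 18:59:41 +0000 UTC")
	firstPull := parse("2025-12-02 18:59:42.679772323 +0000 UTC")
	lastPull := parse("2025-12-02 18:59:53.231896058 +0000 UTC")
	running := parse("2025-12-02 18:59:53.706191327 +0000 UTC") // watchObservedRunningTime

	e2e := running.Sub(created)          // 12.706191327s, the logged podStartE2EDuration
	slo := e2e - lastPull.Sub(firstPull) // 2.154067592s, the logged podStartSLOduration
	fmt.Println(e2e, slo)
}

For ceilometer-0 the same arithmetic lands within about 10ns of the logged podStartSLOduration=1.9096180230000002, because the kubelet subtracts the monotonic (m=+...) readings rather than the wall-clock timestamps.
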
finished" podID="6289387e-3bb7-49ef-a910-8f5277113b97" containerID="556f226c2e07c69c564c33a90bfecbdda9283e9c52f8a2815253aa190bf5719f" exitCode=0 Dec 02 18:59:54 crc kubenswrapper[4792]: I1202 18:59:54.698456 4792 generic.go:334] "Generic (PLEG): container finished" podID="6289387e-3bb7-49ef-a910-8f5277113b97" containerID="9a79a68507a5a8cdc89cfe92afa51e9aa9718bc6b2a86daac2d4281e104c4964" exitCode=2 Dec 02 18:59:54 crc kubenswrapper[4792]: I1202 18:59:54.698465 4792 generic.go:334] "Generic (PLEG): container finished" podID="6289387e-3bb7-49ef-a910-8f5277113b97" containerID="c95bcc8843b0181781f0ac4a9e58461f53b55eae8de8ac0e60563d5852c08d5f" exitCode=0 Dec 02 18:59:54 crc kubenswrapper[4792]: I1202 18:59:54.698232 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6289387e-3bb7-49ef-a910-8f5277113b97","Type":"ContainerDied","Data":"556f226c2e07c69c564c33a90bfecbdda9283e9c52f8a2815253aa190bf5719f"} Dec 02 18:59:54 crc kubenswrapper[4792]: I1202 18:59:54.698619 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6289387e-3bb7-49ef-a910-8f5277113b97","Type":"ContainerDied","Data":"9a79a68507a5a8cdc89cfe92afa51e9aa9718bc6b2a86daac2d4281e104c4964"} Dec 02 18:59:54 crc kubenswrapper[4792]: I1202 18:59:54.698645 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6289387e-3bb7-49ef-a910-8f5277113b97","Type":"ContainerDied","Data":"c95bcc8843b0181781f0ac4a9e58461f53b55eae8de8ac0e60563d5852c08d5f"} Dec 02 18:59:56 crc kubenswrapper[4792]: I1202 18:59:56.731194 4792 generic.go:334] "Generic (PLEG): container finished" podID="6289387e-3bb7-49ef-a910-8f5277113b97" containerID="235ddcb5467cde2032c32523cecb58c63cc6c0fa70a16daed83ea4135426275a" exitCode=0 Dec 02 18:59:56 crc kubenswrapper[4792]: I1202 18:59:56.732781 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6289387e-3bb7-49ef-a910-8f5277113b97","Type":"ContainerDied","Data":"235ddcb5467cde2032c32523cecb58c63cc6c0fa70a16daed83ea4135426275a"} Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.145460 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.258698 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6289387e-3bb7-49ef-a910-8f5277113b97-run-httpd\") pod \"6289387e-3bb7-49ef-a910-8f5277113b97\" (UID: \"6289387e-3bb7-49ef-a910-8f5277113b97\") " Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.259150 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6289387e-3bb7-49ef-a910-8f5277113b97-sg-core-conf-yaml\") pod \"6289387e-3bb7-49ef-a910-8f5277113b97\" (UID: \"6289387e-3bb7-49ef-a910-8f5277113b97\") " Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.259212 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6289387e-3bb7-49ef-a910-8f5277113b97-log-httpd\") pod \"6289387e-3bb7-49ef-a910-8f5277113b97\" (UID: \"6289387e-3bb7-49ef-a910-8f5277113b97\") " Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.259255 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6289387e-3bb7-49ef-a910-8f5277113b97-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "6289387e-3bb7-49ef-a910-8f5277113b97" (UID: "6289387e-3bb7-49ef-a910-8f5277113b97"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.259285 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6289387e-3bb7-49ef-a910-8f5277113b97-config-data\") pod \"6289387e-3bb7-49ef-a910-8f5277113b97\" (UID: \"6289387e-3bb7-49ef-a910-8f5277113b97\") " Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.259384 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6289387e-3bb7-49ef-a910-8f5277113b97-combined-ca-bundle\") pod \"6289387e-3bb7-49ef-a910-8f5277113b97\" (UID: \"6289387e-3bb7-49ef-a910-8f5277113b97\") " Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.259415 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6289387e-3bb7-49ef-a910-8f5277113b97-scripts\") pod \"6289387e-3bb7-49ef-a910-8f5277113b97\" (UID: \"6289387e-3bb7-49ef-a910-8f5277113b97\") " Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.259452 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hsp47\" (UniqueName: \"kubernetes.io/projected/6289387e-3bb7-49ef-a910-8f5277113b97-kube-api-access-hsp47\") pod \"6289387e-3bb7-49ef-a910-8f5277113b97\" (UID: \"6289387e-3bb7-49ef-a910-8f5277113b97\") " Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.259590 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6289387e-3bb7-49ef-a910-8f5277113b97-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "6289387e-3bb7-49ef-a910-8f5277113b97" (UID: "6289387e-3bb7-49ef-a910-8f5277113b97"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.260001 4792 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6289387e-3bb7-49ef-a910-8f5277113b97-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.260025 4792 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6289387e-3bb7-49ef-a910-8f5277113b97-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.266194 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6289387e-3bb7-49ef-a910-8f5277113b97-scripts" (OuterVolumeSpecName: "scripts") pod "6289387e-3bb7-49ef-a910-8f5277113b97" (UID: "6289387e-3bb7-49ef-a910-8f5277113b97"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.269777 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6289387e-3bb7-49ef-a910-8f5277113b97-kube-api-access-hsp47" (OuterVolumeSpecName: "kube-api-access-hsp47") pod "6289387e-3bb7-49ef-a910-8f5277113b97" (UID: "6289387e-3bb7-49ef-a910-8f5277113b97"). InnerVolumeSpecName "kube-api-access-hsp47". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.288732 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6289387e-3bb7-49ef-a910-8f5277113b97-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "6289387e-3bb7-49ef-a910-8f5277113b97" (UID: "6289387e-3bb7-49ef-a910-8f5277113b97"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.361965 4792 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6289387e-3bb7-49ef-a910-8f5277113b97-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.361999 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6289387e-3bb7-49ef-a910-8f5277113b97-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.362013 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hsp47\" (UniqueName: \"kubernetes.io/projected/6289387e-3bb7-49ef-a910-8f5277113b97-kube-api-access-hsp47\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.379617 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6289387e-3bb7-49ef-a910-8f5277113b97-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6289387e-3bb7-49ef-a910-8f5277113b97" (UID: "6289387e-3bb7-49ef-a910-8f5277113b97"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.418209 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6289387e-3bb7-49ef-a910-8f5277113b97-config-data" (OuterVolumeSpecName: "config-data") pod "6289387e-3bb7-49ef-a910-8f5277113b97" (UID: "6289387e-3bb7-49ef-a910-8f5277113b97"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.464302 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6289387e-3bb7-49ef-a910-8f5277113b97-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.464339 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6289387e-3bb7-49ef-a910-8f5277113b97-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.749345 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6289387e-3bb7-49ef-a910-8f5277113b97","Type":"ContainerDied","Data":"d80703ec42ec5f0b9dcb47a4202ff740ac1ef09bc2a3c1d6afd8969fdc07da35"} Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.749400 4792 scope.go:117] "RemoveContainer" containerID="556f226c2e07c69c564c33a90bfecbdda9283e9c52f8a2815253aa190bf5719f" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.749416 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.788163 4792 scope.go:117] "RemoveContainer" containerID="9a79a68507a5a8cdc89cfe92afa51e9aa9718bc6b2a86daac2d4281e104c4964" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.788917 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.799713 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.811584 4792 scope.go:117] "RemoveContainer" containerID="c95bcc8843b0181781f0ac4a9e58461f53b55eae8de8ac0e60563d5852c08d5f" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.816000 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 02 18:59:57 crc kubenswrapper[4792]: E1202 18:59:57.816423 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6289387e-3bb7-49ef-a910-8f5277113b97" containerName="ceilometer-central-agent" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.816446 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="6289387e-3bb7-49ef-a910-8f5277113b97" containerName="ceilometer-central-agent" Dec 02 18:59:57 crc kubenswrapper[4792]: E1202 18:59:57.816462 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6289387e-3bb7-49ef-a910-8f5277113b97" containerName="sg-core" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.816469 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="6289387e-3bb7-49ef-a910-8f5277113b97" containerName="sg-core" Dec 02 18:59:57 crc kubenswrapper[4792]: E1202 18:59:57.816482 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6289387e-3bb7-49ef-a910-8f5277113b97" containerName="ceilometer-notification-agent" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.816488 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="6289387e-3bb7-49ef-a910-8f5277113b97" containerName="ceilometer-notification-agent" Dec 02 18:59:57 crc kubenswrapper[4792]: E1202 18:59:57.816504 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6289387e-3bb7-49ef-a910-8f5277113b97" containerName="proxy-httpd" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.816510 4792 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="6289387e-3bb7-49ef-a910-8f5277113b97" containerName="proxy-httpd" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.816715 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="6289387e-3bb7-49ef-a910-8f5277113b97" containerName="ceilometer-central-agent" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.816739 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="6289387e-3bb7-49ef-a910-8f5277113b97" containerName="ceilometer-notification-agent" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.816760 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="6289387e-3bb7-49ef-a910-8f5277113b97" containerName="sg-core" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.816772 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="6289387e-3bb7-49ef-a910-8f5277113b97" containerName="proxy-httpd" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.818586 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.820630 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.821170 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.839666 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.871814 4792 scope.go:117] "RemoveContainer" containerID="235ddcb5467cde2032c32523cecb58c63cc6c0fa70a16daed83ea4135426275a" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.880598 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17828c32-070b-40f2-a6f8-c86e4582cee0-config-data\") pod \"ceilometer-0\" (UID: \"17828c32-070b-40f2-a6f8-c86e4582cee0\") " pod="openstack/ceilometer-0" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.880671 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17828c32-070b-40f2-a6f8-c86e4582cee0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"17828c32-070b-40f2-a6f8-c86e4582cee0\") " pod="openstack/ceilometer-0" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.880737 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/17828c32-070b-40f2-a6f8-c86e4582cee0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"17828c32-070b-40f2-a6f8-c86e4582cee0\") " pod="openstack/ceilometer-0" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.880785 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/17828c32-070b-40f2-a6f8-c86e4582cee0-run-httpd\") pod \"ceilometer-0\" (UID: \"17828c32-070b-40f2-a6f8-c86e4582cee0\") " pod="openstack/ceilometer-0" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.880850 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/17828c32-070b-40f2-a6f8-c86e4582cee0-log-httpd\") pod \"ceilometer-0\" (UID: 
\"17828c32-070b-40f2-a6f8-c86e4582cee0\") " pod="openstack/ceilometer-0" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.880898 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-klc72\" (UniqueName: \"kubernetes.io/projected/17828c32-070b-40f2-a6f8-c86e4582cee0-kube-api-access-klc72\") pod \"ceilometer-0\" (UID: \"17828c32-070b-40f2-a6f8-c86e4582cee0\") " pod="openstack/ceilometer-0" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.880964 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17828c32-070b-40f2-a6f8-c86e4582cee0-scripts\") pod \"ceilometer-0\" (UID: \"17828c32-070b-40f2-a6f8-c86e4582cee0\") " pod="openstack/ceilometer-0" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.982886 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-klc72\" (UniqueName: \"kubernetes.io/projected/17828c32-070b-40f2-a6f8-c86e4582cee0-kube-api-access-klc72\") pod \"ceilometer-0\" (UID: \"17828c32-070b-40f2-a6f8-c86e4582cee0\") " pod="openstack/ceilometer-0" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.982978 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17828c32-070b-40f2-a6f8-c86e4582cee0-scripts\") pod \"ceilometer-0\" (UID: \"17828c32-070b-40f2-a6f8-c86e4582cee0\") " pod="openstack/ceilometer-0" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.983037 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17828c32-070b-40f2-a6f8-c86e4582cee0-config-data\") pod \"ceilometer-0\" (UID: \"17828c32-070b-40f2-a6f8-c86e4582cee0\") " pod="openstack/ceilometer-0" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.983071 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17828c32-070b-40f2-a6f8-c86e4582cee0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"17828c32-070b-40f2-a6f8-c86e4582cee0\") " pod="openstack/ceilometer-0" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.983100 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/17828c32-070b-40f2-a6f8-c86e4582cee0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"17828c32-070b-40f2-a6f8-c86e4582cee0\") " pod="openstack/ceilometer-0" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.983136 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/17828c32-070b-40f2-a6f8-c86e4582cee0-run-httpd\") pod \"ceilometer-0\" (UID: \"17828c32-070b-40f2-a6f8-c86e4582cee0\") " pod="openstack/ceilometer-0" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.983203 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/17828c32-070b-40f2-a6f8-c86e4582cee0-log-httpd\") pod \"ceilometer-0\" (UID: \"17828c32-070b-40f2-a6f8-c86e4582cee0\") " pod="openstack/ceilometer-0" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.983805 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/17828c32-070b-40f2-a6f8-c86e4582cee0-log-httpd\") pod 
\"ceilometer-0\" (UID: \"17828c32-070b-40f2-a6f8-c86e4582cee0\") " pod="openstack/ceilometer-0" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.985333 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/17828c32-070b-40f2-a6f8-c86e4582cee0-run-httpd\") pod \"ceilometer-0\" (UID: \"17828c32-070b-40f2-a6f8-c86e4582cee0\") " pod="openstack/ceilometer-0" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.990952 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/17828c32-070b-40f2-a6f8-c86e4582cee0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"17828c32-070b-40f2-a6f8-c86e4582cee0\") " pod="openstack/ceilometer-0" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.991096 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17828c32-070b-40f2-a6f8-c86e4582cee0-scripts\") pod \"ceilometer-0\" (UID: \"17828c32-070b-40f2-a6f8-c86e4582cee0\") " pod="openstack/ceilometer-0" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.992109 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17828c32-070b-40f2-a6f8-c86e4582cee0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"17828c32-070b-40f2-a6f8-c86e4582cee0\") " pod="openstack/ceilometer-0" Dec 02 18:59:57 crc kubenswrapper[4792]: I1202 18:59:57.995947 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17828c32-070b-40f2-a6f8-c86e4582cee0-config-data\") pod \"ceilometer-0\" (UID: \"17828c32-070b-40f2-a6f8-c86e4582cee0\") " pod="openstack/ceilometer-0" Dec 02 18:59:58 crc kubenswrapper[4792]: I1202 18:59:57.999042 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-klc72\" (UniqueName: \"kubernetes.io/projected/17828c32-070b-40f2-a6f8-c86e4582cee0-kube-api-access-klc72\") pod \"ceilometer-0\" (UID: \"17828c32-070b-40f2-a6f8-c86e4582cee0\") " pod="openstack/ceilometer-0" Dec 02 18:59:58 crc kubenswrapper[4792]: I1202 18:59:58.142927 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 18:59:58 crc kubenswrapper[4792]: I1202 18:59:58.686778 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 18:59:58 crc kubenswrapper[4792]: W1202 18:59:58.689748 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod17828c32_070b_40f2_a6f8_c86e4582cee0.slice/crio-8ad85ae802aaa0bef4c94c0a3acb88e077922f326244fd8c9d31cb370544adb4 WatchSource:0}: Error finding container 8ad85ae802aaa0bef4c94c0a3acb88e077922f326244fd8c9d31cb370544adb4: Status 404 returned error can't find the container with id 8ad85ae802aaa0bef4c94c0a3acb88e077922f326244fd8c9d31cb370544adb4 Dec 02 18:59:58 crc kubenswrapper[4792]: I1202 18:59:58.762674 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"17828c32-070b-40f2-a6f8-c86e4582cee0","Type":"ContainerStarted","Data":"8ad85ae802aaa0bef4c94c0a3acb88e077922f326244fd8c9d31cb370544adb4"} Dec 02 18:59:59 crc kubenswrapper[4792]: I1202 18:59:59.563298 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6289387e-3bb7-49ef-a910-8f5277113b97" path="/var/lib/kubelet/pods/6289387e-3bb7-49ef-a910-8f5277113b97/volumes" Dec 02 18:59:59 crc kubenswrapper[4792]: I1202 18:59:59.773983 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"17828c32-070b-40f2-a6f8-c86e4582cee0","Type":"ContainerStarted","Data":"9faf00a3754bbfe0a4fa067155c117a76747232159701546ccf05b4b72775f99"} Dec 02 19:00:00 crc kubenswrapper[4792]: I1202 19:00:00.150568 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411700-mhwr5"] Dec 02 19:00:00 crc kubenswrapper[4792]: I1202 19:00:00.152495 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411700-mhwr5" Dec 02 19:00:00 crc kubenswrapper[4792]: I1202 19:00:00.154694 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 02 19:00:00 crc kubenswrapper[4792]: I1202 19:00:00.159310 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 02 19:00:00 crc kubenswrapper[4792]: I1202 19:00:00.167332 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411700-mhwr5"] Dec 02 19:00:00 crc kubenswrapper[4792]: I1202 19:00:00.283679 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fb6b8384-e7ae-45ac-a047-6a9d61275cff-secret-volume\") pod \"collect-profiles-29411700-mhwr5\" (UID: \"fb6b8384-e7ae-45ac-a047-6a9d61275cff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411700-mhwr5" Dec 02 19:00:00 crc kubenswrapper[4792]: I1202 19:00:00.283771 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5g9wc\" (UniqueName: \"kubernetes.io/projected/fb6b8384-e7ae-45ac-a047-6a9d61275cff-kube-api-access-5g9wc\") pod \"collect-profiles-29411700-mhwr5\" (UID: \"fb6b8384-e7ae-45ac-a047-6a9d61275cff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411700-mhwr5" Dec 02 19:00:00 crc kubenswrapper[4792]: I1202 19:00:00.283998 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fb6b8384-e7ae-45ac-a047-6a9d61275cff-config-volume\") pod \"collect-profiles-29411700-mhwr5\" (UID: \"fb6b8384-e7ae-45ac-a047-6a9d61275cff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411700-mhwr5" Dec 02 19:00:00 crc kubenswrapper[4792]: I1202 19:00:00.386872 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fb6b8384-e7ae-45ac-a047-6a9d61275cff-secret-volume\") pod \"collect-profiles-29411700-mhwr5\" (UID: \"fb6b8384-e7ae-45ac-a047-6a9d61275cff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411700-mhwr5" Dec 02 19:00:00 crc kubenswrapper[4792]: I1202 19:00:00.388249 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5g9wc\" (UniqueName: \"kubernetes.io/projected/fb6b8384-e7ae-45ac-a047-6a9d61275cff-kube-api-access-5g9wc\") pod \"collect-profiles-29411700-mhwr5\" (UID: \"fb6b8384-e7ae-45ac-a047-6a9d61275cff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411700-mhwr5" Dec 02 19:00:00 crc kubenswrapper[4792]: I1202 19:00:00.388781 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fb6b8384-e7ae-45ac-a047-6a9d61275cff-config-volume\") pod \"collect-profiles-29411700-mhwr5\" (UID: \"fb6b8384-e7ae-45ac-a047-6a9d61275cff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411700-mhwr5" Dec 02 19:00:00 crc kubenswrapper[4792]: I1202 19:00:00.389823 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fb6b8384-e7ae-45ac-a047-6a9d61275cff-config-volume\") pod 
\"collect-profiles-29411700-mhwr5\" (UID: \"fb6b8384-e7ae-45ac-a047-6a9d61275cff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411700-mhwr5" Dec 02 19:00:00 crc kubenswrapper[4792]: I1202 19:00:00.393611 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fb6b8384-e7ae-45ac-a047-6a9d61275cff-secret-volume\") pod \"collect-profiles-29411700-mhwr5\" (UID: \"fb6b8384-e7ae-45ac-a047-6a9d61275cff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411700-mhwr5" Dec 02 19:00:00 crc kubenswrapper[4792]: I1202 19:00:00.420116 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5g9wc\" (UniqueName: \"kubernetes.io/projected/fb6b8384-e7ae-45ac-a047-6a9d61275cff-kube-api-access-5g9wc\") pod \"collect-profiles-29411700-mhwr5\" (UID: \"fb6b8384-e7ae-45ac-a047-6a9d61275cff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411700-mhwr5" Dec 02 19:00:00 crc kubenswrapper[4792]: I1202 19:00:00.490391 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411700-mhwr5" Dec 02 19:00:00 crc kubenswrapper[4792]: I1202 19:00:00.785361 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"17828c32-070b-40f2-a6f8-c86e4582cee0","Type":"ContainerStarted","Data":"79bc45c4acd0c6882826ae3dd2456dc6be642cd97e7ef6e1c1136fe97cf536fe"} Dec 02 19:00:00 crc kubenswrapper[4792]: I1202 19:00:00.785701 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"17828c32-070b-40f2-a6f8-c86e4582cee0","Type":"ContainerStarted","Data":"a5238bdab8364f7f858e89f42ebfcc6617b4cae696b5f7161689e9acf5855ced"} Dec 02 19:00:01 crc kubenswrapper[4792]: I1202 19:00:01.001301 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411700-mhwr5"] Dec 02 19:00:01 crc kubenswrapper[4792]: W1202 19:00:01.009417 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfb6b8384_e7ae_45ac_a047_6a9d61275cff.slice/crio-43ab7a89369476153455d8aaf145fe900ca4016d5ef2f9536f15d827611ce2b4 WatchSource:0}: Error finding container 43ab7a89369476153455d8aaf145fe900ca4016d5ef2f9536f15d827611ce2b4: Status 404 returned error can't find the container with id 43ab7a89369476153455d8aaf145fe900ca4016d5ef2f9536f15d827611ce2b4 Dec 02 19:00:01 crc kubenswrapper[4792]: I1202 19:00:01.796399 4792 generic.go:334] "Generic (PLEG): container finished" podID="fb6b8384-e7ae-45ac-a047-6a9d61275cff" containerID="6e6ffb95697459136beac3289cc65741a59ce250a4421c887716076e7b7bf5bd" exitCode=0 Dec 02 19:00:01 crc kubenswrapper[4792]: I1202 19:00:01.796452 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411700-mhwr5" event={"ID":"fb6b8384-e7ae-45ac-a047-6a9d61275cff","Type":"ContainerDied","Data":"6e6ffb95697459136beac3289cc65741a59ce250a4421c887716076e7b7bf5bd"} Dec 02 19:00:01 crc kubenswrapper[4792]: I1202 19:00:01.796670 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411700-mhwr5" event={"ID":"fb6b8384-e7ae-45ac-a047-6a9d61275cff","Type":"ContainerStarted","Data":"43ab7a89369476153455d8aaf145fe900ca4016d5ef2f9536f15d827611ce2b4"} Dec 02 19:00:03 crc kubenswrapper[4792]: I1202 19:00:03.310041 4792 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411700-mhwr5" Dec 02 19:00:03 crc kubenswrapper[4792]: I1202 19:00:03.454623 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fb6b8384-e7ae-45ac-a047-6a9d61275cff-config-volume\") pod \"fb6b8384-e7ae-45ac-a047-6a9d61275cff\" (UID: \"fb6b8384-e7ae-45ac-a047-6a9d61275cff\") " Dec 02 19:00:03 crc kubenswrapper[4792]: I1202 19:00:03.455157 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5g9wc\" (UniqueName: \"kubernetes.io/projected/fb6b8384-e7ae-45ac-a047-6a9d61275cff-kube-api-access-5g9wc\") pod \"fb6b8384-e7ae-45ac-a047-6a9d61275cff\" (UID: \"fb6b8384-e7ae-45ac-a047-6a9d61275cff\") " Dec 02 19:00:03 crc kubenswrapper[4792]: I1202 19:00:03.455182 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fb6b8384-e7ae-45ac-a047-6a9d61275cff-secret-volume\") pod \"fb6b8384-e7ae-45ac-a047-6a9d61275cff\" (UID: \"fb6b8384-e7ae-45ac-a047-6a9d61275cff\") " Dec 02 19:00:03 crc kubenswrapper[4792]: I1202 19:00:03.457000 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fb6b8384-e7ae-45ac-a047-6a9d61275cff-config-volume" (OuterVolumeSpecName: "config-volume") pod "fb6b8384-e7ae-45ac-a047-6a9d61275cff" (UID: "fb6b8384-e7ae-45ac-a047-6a9d61275cff"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 19:00:03 crc kubenswrapper[4792]: I1202 19:00:03.469181 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb6b8384-e7ae-45ac-a047-6a9d61275cff-kube-api-access-5g9wc" (OuterVolumeSpecName: "kube-api-access-5g9wc") pod "fb6b8384-e7ae-45ac-a047-6a9d61275cff" (UID: "fb6b8384-e7ae-45ac-a047-6a9d61275cff"). InnerVolumeSpecName "kube-api-access-5g9wc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:00:03 crc kubenswrapper[4792]: I1202 19:00:03.471636 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb6b8384-e7ae-45ac-a047-6a9d61275cff-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "fb6b8384-e7ae-45ac-a047-6a9d61275cff" (UID: "fb6b8384-e7ae-45ac-a047-6a9d61275cff"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:00:03 crc kubenswrapper[4792]: I1202 19:00:03.556741 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5g9wc\" (UniqueName: \"kubernetes.io/projected/fb6b8384-e7ae-45ac-a047-6a9d61275cff-kube-api-access-5g9wc\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:03 crc kubenswrapper[4792]: I1202 19:00:03.556773 4792 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fb6b8384-e7ae-45ac-a047-6a9d61275cff-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:03 crc kubenswrapper[4792]: I1202 19:00:03.556784 4792 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fb6b8384-e7ae-45ac-a047-6a9d61275cff-config-volume\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:03 crc kubenswrapper[4792]: I1202 19:00:03.825061 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411700-mhwr5" event={"ID":"fb6b8384-e7ae-45ac-a047-6a9d61275cff","Type":"ContainerDied","Data":"43ab7a89369476153455d8aaf145fe900ca4016d5ef2f9536f15d827611ce2b4"} Dec 02 19:00:03 crc kubenswrapper[4792]: I1202 19:00:03.825101 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="43ab7a89369476153455d8aaf145fe900ca4016d5ef2f9536f15d827611ce2b4" Dec 02 19:00:03 crc kubenswrapper[4792]: I1202 19:00:03.825104 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411700-mhwr5" Dec 02 19:00:03 crc kubenswrapper[4792]: I1202 19:00:03.828033 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"17828c32-070b-40f2-a6f8-c86e4582cee0","Type":"ContainerStarted","Data":"a3984d00e2bdd161932a020ba55c218065e4a3aedfeaaeb936f7358154b4abb9"} Dec 02 19:00:03 crc kubenswrapper[4792]: I1202 19:00:03.828253 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 02 19:00:03 crc kubenswrapper[4792]: I1202 19:00:03.851396 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.329401235 podStartE2EDuration="6.851372958s" podCreationTimestamp="2025-12-02 18:59:57 +0000 UTC" firstStartedPulling="2025-12-02 18:59:58.692134343 +0000 UTC m=+1429.465026671" lastFinishedPulling="2025-12-02 19:00:03.214106066 +0000 UTC m=+1433.986998394" observedRunningTime="2025-12-02 19:00:03.847186559 +0000 UTC m=+1434.620078897" watchObservedRunningTime="2025-12-02 19:00:03.851372958 +0000 UTC m=+1434.624265306" Dec 02 19:00:06 crc kubenswrapper[4792]: I1202 19:00:06.679208 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 19:00:06 crc kubenswrapper[4792]: I1202 19:00:06.679810 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="17828c32-070b-40f2-a6f8-c86e4582cee0" containerName="ceilometer-central-agent" containerID="cri-o://9faf00a3754bbfe0a4fa067155c117a76747232159701546ccf05b4b72775f99" gracePeriod=30 Dec 02 19:00:06 crc kubenswrapper[4792]: I1202 19:00:06.680511 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="17828c32-070b-40f2-a6f8-c86e4582cee0" containerName="sg-core" containerID="cri-o://79bc45c4acd0c6882826ae3dd2456dc6be642cd97e7ef6e1c1136fe97cf536fe" 
gracePeriod=30 Dec 02 19:00:06 crc kubenswrapper[4792]: I1202 19:00:06.680546 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="17828c32-070b-40f2-a6f8-c86e4582cee0" containerName="proxy-httpd" containerID="cri-o://a3984d00e2bdd161932a020ba55c218065e4a3aedfeaaeb936f7358154b4abb9" gracePeriod=30 Dec 02 19:00:06 crc kubenswrapper[4792]: I1202 19:00:06.680603 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="17828c32-070b-40f2-a6f8-c86e4582cee0" containerName="ceilometer-notification-agent" containerID="cri-o://a5238bdab8364f7f858e89f42ebfcc6617b4cae696b5f7161689e9acf5855ced" gracePeriod=30 Dec 02 19:00:06 crc kubenswrapper[4792]: I1202 19:00:06.859954 4792 generic.go:334] "Generic (PLEG): container finished" podID="17828c32-070b-40f2-a6f8-c86e4582cee0" containerID="79bc45c4acd0c6882826ae3dd2456dc6be642cd97e7ef6e1c1136fe97cf536fe" exitCode=2 Dec 02 19:00:06 crc kubenswrapper[4792]: I1202 19:00:06.860022 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"17828c32-070b-40f2-a6f8-c86e4582cee0","Type":"ContainerDied","Data":"79bc45c4acd0c6882826ae3dd2456dc6be642cd97e7ef6e1c1136fe97cf536fe"} Dec 02 19:00:06 crc kubenswrapper[4792]: I1202 19:00:06.861868 4792 generic.go:334] "Generic (PLEG): container finished" podID="c750063f-56d2-47d7-9237-a7ab6a26f6a2" containerID="96c8a20700cfe76e4fea8b6e0ea62097ccb2d41a1004ddb04a056d9f913ee1a7" exitCode=0 Dec 02 19:00:06 crc kubenswrapper[4792]: I1202 19:00:06.861892 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-jh2dc" event={"ID":"c750063f-56d2-47d7-9237-a7ab6a26f6a2","Type":"ContainerDied","Data":"96c8a20700cfe76e4fea8b6e0ea62097ccb2d41a1004ddb04a056d9f913ee1a7"} Dec 02 19:00:07 crc kubenswrapper[4792]: I1202 19:00:07.879274 4792 generic.go:334] "Generic (PLEG): container finished" podID="17828c32-070b-40f2-a6f8-c86e4582cee0" containerID="a3984d00e2bdd161932a020ba55c218065e4a3aedfeaaeb936f7358154b4abb9" exitCode=0 Dec 02 19:00:07 crc kubenswrapper[4792]: I1202 19:00:07.879595 4792 generic.go:334] "Generic (PLEG): container finished" podID="17828c32-070b-40f2-a6f8-c86e4582cee0" containerID="a5238bdab8364f7f858e89f42ebfcc6617b4cae696b5f7161689e9acf5855ced" exitCode=0 Dec 02 19:00:07 crc kubenswrapper[4792]: I1202 19:00:07.879368 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"17828c32-070b-40f2-a6f8-c86e4582cee0","Type":"ContainerDied","Data":"a3984d00e2bdd161932a020ba55c218065e4a3aedfeaaeb936f7358154b4abb9"} Dec 02 19:00:07 crc kubenswrapper[4792]: I1202 19:00:07.879668 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"17828c32-070b-40f2-a6f8-c86e4582cee0","Type":"ContainerDied","Data":"a5238bdab8364f7f858e89f42ebfcc6617b4cae696b5f7161689e9acf5855ced"} Dec 02 19:00:08 crc kubenswrapper[4792]: I1202 19:00:08.316264 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-jh2dc" Dec 02 19:00:08 crc kubenswrapper[4792]: I1202 19:00:08.450695 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c750063f-56d2-47d7-9237-a7ab6a26f6a2-config-data\") pod \"c750063f-56d2-47d7-9237-a7ab6a26f6a2\" (UID: \"c750063f-56d2-47d7-9237-a7ab6a26f6a2\") " Dec 02 19:00:08 crc kubenswrapper[4792]: I1202 19:00:08.450771 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b5j6l\" (UniqueName: \"kubernetes.io/projected/c750063f-56d2-47d7-9237-a7ab6a26f6a2-kube-api-access-b5j6l\") pod \"c750063f-56d2-47d7-9237-a7ab6a26f6a2\" (UID: \"c750063f-56d2-47d7-9237-a7ab6a26f6a2\") " Dec 02 19:00:08 crc kubenswrapper[4792]: I1202 19:00:08.450803 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c750063f-56d2-47d7-9237-a7ab6a26f6a2-combined-ca-bundle\") pod \"c750063f-56d2-47d7-9237-a7ab6a26f6a2\" (UID: \"c750063f-56d2-47d7-9237-a7ab6a26f6a2\") " Dec 02 19:00:08 crc kubenswrapper[4792]: I1202 19:00:08.451014 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c750063f-56d2-47d7-9237-a7ab6a26f6a2-scripts\") pod \"c750063f-56d2-47d7-9237-a7ab6a26f6a2\" (UID: \"c750063f-56d2-47d7-9237-a7ab6a26f6a2\") " Dec 02 19:00:08 crc kubenswrapper[4792]: I1202 19:00:08.460407 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c750063f-56d2-47d7-9237-a7ab6a26f6a2-scripts" (OuterVolumeSpecName: "scripts") pod "c750063f-56d2-47d7-9237-a7ab6a26f6a2" (UID: "c750063f-56d2-47d7-9237-a7ab6a26f6a2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:00:08 crc kubenswrapper[4792]: I1202 19:00:08.468930 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c750063f-56d2-47d7-9237-a7ab6a26f6a2-kube-api-access-b5j6l" (OuterVolumeSpecName: "kube-api-access-b5j6l") pod "c750063f-56d2-47d7-9237-a7ab6a26f6a2" (UID: "c750063f-56d2-47d7-9237-a7ab6a26f6a2"). InnerVolumeSpecName "kube-api-access-b5j6l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:00:08 crc kubenswrapper[4792]: I1202 19:00:08.487240 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c750063f-56d2-47d7-9237-a7ab6a26f6a2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c750063f-56d2-47d7-9237-a7ab6a26f6a2" (UID: "c750063f-56d2-47d7-9237-a7ab6a26f6a2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:00:08 crc kubenswrapper[4792]: I1202 19:00:08.506053 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c750063f-56d2-47d7-9237-a7ab6a26f6a2-config-data" (OuterVolumeSpecName: "config-data") pod "c750063f-56d2-47d7-9237-a7ab6a26f6a2" (UID: "c750063f-56d2-47d7-9237-a7ab6a26f6a2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:00:08 crc kubenswrapper[4792]: I1202 19:00:08.554021 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c750063f-56d2-47d7-9237-a7ab6a26f6a2-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:08 crc kubenswrapper[4792]: I1202 19:00:08.554273 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b5j6l\" (UniqueName: \"kubernetes.io/projected/c750063f-56d2-47d7-9237-a7ab6a26f6a2-kube-api-access-b5j6l\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:08 crc kubenswrapper[4792]: I1202 19:00:08.554288 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c750063f-56d2-47d7-9237-a7ab6a26f6a2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:08 crc kubenswrapper[4792]: I1202 19:00:08.554300 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c750063f-56d2-47d7-9237-a7ab6a26f6a2-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:08 crc kubenswrapper[4792]: I1202 19:00:08.892136 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-jh2dc" event={"ID":"c750063f-56d2-47d7-9237-a7ab6a26f6a2","Type":"ContainerDied","Data":"344567230a7a8791dbd77471976cc73f32ee571b0359e7e088c4c006c207bca1"} Dec 02 19:00:08 crc kubenswrapper[4792]: I1202 19:00:08.892181 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="344567230a7a8791dbd77471976cc73f32ee571b0359e7e088c4c006c207bca1" Dec 02 19:00:08 crc kubenswrapper[4792]: I1202 19:00:08.892242 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-jh2dc" Dec 02 19:00:08 crc kubenswrapper[4792]: I1202 19:00:08.975952 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 02 19:00:08 crc kubenswrapper[4792]: E1202 19:00:08.976476 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb6b8384-e7ae-45ac-a047-6a9d61275cff" containerName="collect-profiles" Dec 02 19:00:08 crc kubenswrapper[4792]: I1202 19:00:08.976497 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb6b8384-e7ae-45ac-a047-6a9d61275cff" containerName="collect-profiles" Dec 02 19:00:08 crc kubenswrapper[4792]: E1202 19:00:08.976545 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c750063f-56d2-47d7-9237-a7ab6a26f6a2" containerName="nova-cell0-conductor-db-sync" Dec 02 19:00:08 crc kubenswrapper[4792]: I1202 19:00:08.976554 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="c750063f-56d2-47d7-9237-a7ab6a26f6a2" containerName="nova-cell0-conductor-db-sync" Dec 02 19:00:08 crc kubenswrapper[4792]: I1202 19:00:08.976789 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb6b8384-e7ae-45ac-a047-6a9d61275cff" containerName="collect-profiles" Dec 02 19:00:08 crc kubenswrapper[4792]: I1202 19:00:08.976816 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="c750063f-56d2-47d7-9237-a7ab6a26f6a2" containerName="nova-cell0-conductor-db-sync" Dec 02 19:00:08 crc kubenswrapper[4792]: I1202 19:00:08.981916 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 02 19:00:08 crc kubenswrapper[4792]: I1202 19:00:08.984821 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-68v5w" Dec 02 19:00:08 crc kubenswrapper[4792]: I1202 19:00:08.985105 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 02 19:00:08 crc kubenswrapper[4792]: I1202 19:00:08.987154 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 02 19:00:09 crc kubenswrapper[4792]: I1202 19:00:09.063875 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6ed774e-25d0-47b5-8dd4-1113a9310d29-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"c6ed774e-25d0-47b5-8dd4-1113a9310d29\") " pod="openstack/nova-cell0-conductor-0" Dec 02 19:00:09 crc kubenswrapper[4792]: I1202 19:00:09.064013 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6ed774e-25d0-47b5-8dd4-1113a9310d29-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"c6ed774e-25d0-47b5-8dd4-1113a9310d29\") " pod="openstack/nova-cell0-conductor-0" Dec 02 19:00:09 crc kubenswrapper[4792]: I1202 19:00:09.064113 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4xpp\" (UniqueName: \"kubernetes.io/projected/c6ed774e-25d0-47b5-8dd4-1113a9310d29-kube-api-access-c4xpp\") pod \"nova-cell0-conductor-0\" (UID: \"c6ed774e-25d0-47b5-8dd4-1113a9310d29\") " pod="openstack/nova-cell0-conductor-0" Dec 02 19:00:09 crc kubenswrapper[4792]: I1202 19:00:09.165781 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4xpp\" (UniqueName: \"kubernetes.io/projected/c6ed774e-25d0-47b5-8dd4-1113a9310d29-kube-api-access-c4xpp\") pod \"nova-cell0-conductor-0\" (UID: \"c6ed774e-25d0-47b5-8dd4-1113a9310d29\") " pod="openstack/nova-cell0-conductor-0" Dec 02 19:00:09 crc kubenswrapper[4792]: I1202 19:00:09.165995 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6ed774e-25d0-47b5-8dd4-1113a9310d29-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"c6ed774e-25d0-47b5-8dd4-1113a9310d29\") " pod="openstack/nova-cell0-conductor-0" Dec 02 19:00:09 crc kubenswrapper[4792]: I1202 19:00:09.166051 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6ed774e-25d0-47b5-8dd4-1113a9310d29-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"c6ed774e-25d0-47b5-8dd4-1113a9310d29\") " pod="openstack/nova-cell0-conductor-0" Dec 02 19:00:09 crc kubenswrapper[4792]: I1202 19:00:09.170989 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6ed774e-25d0-47b5-8dd4-1113a9310d29-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"c6ed774e-25d0-47b5-8dd4-1113a9310d29\") " pod="openstack/nova-cell0-conductor-0" Dec 02 19:00:09 crc kubenswrapper[4792]: I1202 19:00:09.171176 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6ed774e-25d0-47b5-8dd4-1113a9310d29-config-data\") pod \"nova-cell0-conductor-0\" 
(UID: \"c6ed774e-25d0-47b5-8dd4-1113a9310d29\") " pod="openstack/nova-cell0-conductor-0" Dec 02 19:00:09 crc kubenswrapper[4792]: I1202 19:00:09.185682 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4xpp\" (UniqueName: \"kubernetes.io/projected/c6ed774e-25d0-47b5-8dd4-1113a9310d29-kube-api-access-c4xpp\") pod \"nova-cell0-conductor-0\" (UID: \"c6ed774e-25d0-47b5-8dd4-1113a9310d29\") " pod="openstack/nova-cell0-conductor-0" Dec 02 19:00:09 crc kubenswrapper[4792]: I1202 19:00:09.313661 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 02 19:00:09 crc kubenswrapper[4792]: I1202 19:00:09.775595 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 02 19:00:09 crc kubenswrapper[4792]: I1202 19:00:09.902833 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"c6ed774e-25d0-47b5-8dd4-1113a9310d29","Type":"ContainerStarted","Data":"7919ad63d542bbe2fb0c061f75e9366dd3567badfccf95fbe51ab9ca48852483"} Dec 02 19:00:10 crc kubenswrapper[4792]: I1202 19:00:10.911960 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"c6ed774e-25d0-47b5-8dd4-1113a9310d29","Type":"ContainerStarted","Data":"e8902b296da8904e2c0680e7b15e0d9e496fd62a5cc75e13a15dcc144bcd09d2"} Dec 02 19:00:10 crc kubenswrapper[4792]: I1202 19:00:10.912210 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 02 19:00:10 crc kubenswrapper[4792]: I1202 19:00:10.938222 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.9382031 podStartE2EDuration="2.9382031s" podCreationTimestamp="2025-12-02 19:00:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 19:00:10.932296827 +0000 UTC m=+1441.705189165" watchObservedRunningTime="2025-12-02 19:00:10.9382031 +0000 UTC m=+1441.711095428" Dec 02 19:00:11 crc kubenswrapper[4792]: I1202 19:00:11.452928 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 19:00:11 crc kubenswrapper[4792]: I1202 19:00:11.629851 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17828c32-070b-40f2-a6f8-c86e4582cee0-scripts\") pod \"17828c32-070b-40f2-a6f8-c86e4582cee0\" (UID: \"17828c32-070b-40f2-a6f8-c86e4582cee0\") " Dec 02 19:00:11 crc kubenswrapper[4792]: I1202 19:00:11.629977 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17828c32-070b-40f2-a6f8-c86e4582cee0-config-data\") pod \"17828c32-070b-40f2-a6f8-c86e4582cee0\" (UID: \"17828c32-070b-40f2-a6f8-c86e4582cee0\") " Dec 02 19:00:11 crc kubenswrapper[4792]: I1202 19:00:11.630021 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17828c32-070b-40f2-a6f8-c86e4582cee0-combined-ca-bundle\") pod \"17828c32-070b-40f2-a6f8-c86e4582cee0\" (UID: \"17828c32-070b-40f2-a6f8-c86e4582cee0\") " Dec 02 19:00:11 crc kubenswrapper[4792]: I1202 19:00:11.630151 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/17828c32-070b-40f2-a6f8-c86e4582cee0-log-httpd\") pod \"17828c32-070b-40f2-a6f8-c86e4582cee0\" (UID: \"17828c32-070b-40f2-a6f8-c86e4582cee0\") " Dec 02 19:00:11 crc kubenswrapper[4792]: I1202 19:00:11.630228 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/17828c32-070b-40f2-a6f8-c86e4582cee0-run-httpd\") pod \"17828c32-070b-40f2-a6f8-c86e4582cee0\" (UID: \"17828c32-070b-40f2-a6f8-c86e4582cee0\") " Dec 02 19:00:11 crc kubenswrapper[4792]: I1202 19:00:11.630249 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-klc72\" (UniqueName: \"kubernetes.io/projected/17828c32-070b-40f2-a6f8-c86e4582cee0-kube-api-access-klc72\") pod \"17828c32-070b-40f2-a6f8-c86e4582cee0\" (UID: \"17828c32-070b-40f2-a6f8-c86e4582cee0\") " Dec 02 19:00:11 crc kubenswrapper[4792]: I1202 19:00:11.630270 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/17828c32-070b-40f2-a6f8-c86e4582cee0-sg-core-conf-yaml\") pod \"17828c32-070b-40f2-a6f8-c86e4582cee0\" (UID: \"17828c32-070b-40f2-a6f8-c86e4582cee0\") " Dec 02 19:00:11 crc kubenswrapper[4792]: I1202 19:00:11.630556 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/17828c32-070b-40f2-a6f8-c86e4582cee0-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "17828c32-070b-40f2-a6f8-c86e4582cee0" (UID: "17828c32-070b-40f2-a6f8-c86e4582cee0"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:00:11 crc kubenswrapper[4792]: I1202 19:00:11.630655 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/17828c32-070b-40f2-a6f8-c86e4582cee0-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "17828c32-070b-40f2-a6f8-c86e4582cee0" (UID: "17828c32-070b-40f2-a6f8-c86e4582cee0"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:00:11 crc kubenswrapper[4792]: I1202 19:00:11.630713 4792 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/17828c32-070b-40f2-a6f8-c86e4582cee0-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:11 crc kubenswrapper[4792]: I1202 19:00:11.639624 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17828c32-070b-40f2-a6f8-c86e4582cee0-kube-api-access-klc72" (OuterVolumeSpecName: "kube-api-access-klc72") pod "17828c32-070b-40f2-a6f8-c86e4582cee0" (UID: "17828c32-070b-40f2-a6f8-c86e4582cee0"). InnerVolumeSpecName "kube-api-access-klc72". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:00:11 crc kubenswrapper[4792]: I1202 19:00:11.644651 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17828c32-070b-40f2-a6f8-c86e4582cee0-scripts" (OuterVolumeSpecName: "scripts") pod "17828c32-070b-40f2-a6f8-c86e4582cee0" (UID: "17828c32-070b-40f2-a6f8-c86e4582cee0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:00:11 crc kubenswrapper[4792]: I1202 19:00:11.684840 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17828c32-070b-40f2-a6f8-c86e4582cee0-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "17828c32-070b-40f2-a6f8-c86e4582cee0" (UID: "17828c32-070b-40f2-a6f8-c86e4582cee0"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:00:11 crc kubenswrapper[4792]: I1202 19:00:11.732932 4792 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/17828c32-070b-40f2-a6f8-c86e4582cee0-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:11 crc kubenswrapper[4792]: I1202 19:00:11.732964 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-klc72\" (UniqueName: \"kubernetes.io/projected/17828c32-070b-40f2-a6f8-c86e4582cee0-kube-api-access-klc72\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:11 crc kubenswrapper[4792]: I1202 19:00:11.732976 4792 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/17828c32-070b-40f2-a6f8-c86e4582cee0-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:11 crc kubenswrapper[4792]: I1202 19:00:11.732986 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17828c32-070b-40f2-a6f8-c86e4582cee0-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:11 crc kubenswrapper[4792]: I1202 19:00:11.763644 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17828c32-070b-40f2-a6f8-c86e4582cee0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "17828c32-070b-40f2-a6f8-c86e4582cee0" (UID: "17828c32-070b-40f2-a6f8-c86e4582cee0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:00:11 crc kubenswrapper[4792]: I1202 19:00:11.781028 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17828c32-070b-40f2-a6f8-c86e4582cee0-config-data" (OuterVolumeSpecName: "config-data") pod "17828c32-070b-40f2-a6f8-c86e4582cee0" (UID: "17828c32-070b-40f2-a6f8-c86e4582cee0"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:00:11 crc kubenswrapper[4792]: I1202 19:00:11.834689 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17828c32-070b-40f2-a6f8-c86e4582cee0-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:11 crc kubenswrapper[4792]: I1202 19:00:11.834724 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17828c32-070b-40f2-a6f8-c86e4582cee0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:11 crc kubenswrapper[4792]: I1202 19:00:11.924606 4792 generic.go:334] "Generic (PLEG): container finished" podID="17828c32-070b-40f2-a6f8-c86e4582cee0" containerID="9faf00a3754bbfe0a4fa067155c117a76747232159701546ccf05b4b72775f99" exitCode=0 Dec 02 19:00:11 crc kubenswrapper[4792]: I1202 19:00:11.924665 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"17828c32-070b-40f2-a6f8-c86e4582cee0","Type":"ContainerDied","Data":"9faf00a3754bbfe0a4fa067155c117a76747232159701546ccf05b4b72775f99"} Dec 02 19:00:11 crc kubenswrapper[4792]: I1202 19:00:11.924707 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"17828c32-070b-40f2-a6f8-c86e4582cee0","Type":"ContainerDied","Data":"8ad85ae802aaa0bef4c94c0a3acb88e077922f326244fd8c9d31cb370544adb4"} Dec 02 19:00:11 crc kubenswrapper[4792]: I1202 19:00:11.924730 4792 scope.go:117] "RemoveContainer" containerID="a3984d00e2bdd161932a020ba55c218065e4a3aedfeaaeb936f7358154b4abb9" Dec 02 19:00:11 crc kubenswrapper[4792]: I1202 19:00:11.924761 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 19:00:11 crc kubenswrapper[4792]: I1202 19:00:11.954380 4792 scope.go:117] "RemoveContainer" containerID="79bc45c4acd0c6882826ae3dd2456dc6be642cd97e7ef6e1c1136fe97cf536fe" Dec 02 19:00:11 crc kubenswrapper[4792]: I1202 19:00:11.993655 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 19:00:11 crc kubenswrapper[4792]: I1202 19:00:11.997922 4792 scope.go:117] "RemoveContainer" containerID="a5238bdab8364f7f858e89f42ebfcc6617b4cae696b5f7161689e9acf5855ced" Dec 02 19:00:12 crc kubenswrapper[4792]: I1202 19:00:12.017092 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 02 19:00:12 crc kubenswrapper[4792]: I1202 19:00:12.030076 4792 scope.go:117] "RemoveContainer" containerID="9faf00a3754bbfe0a4fa067155c117a76747232159701546ccf05b4b72775f99" Dec 02 19:00:12 crc kubenswrapper[4792]: I1202 19:00:12.033390 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 02 19:00:12 crc kubenswrapper[4792]: E1202 19:00:12.040440 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17828c32-070b-40f2-a6f8-c86e4582cee0" containerName="ceilometer-central-agent" Dec 02 19:00:12 crc kubenswrapper[4792]: I1202 19:00:12.040473 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="17828c32-070b-40f2-a6f8-c86e4582cee0" containerName="ceilometer-central-agent" Dec 02 19:00:12 crc kubenswrapper[4792]: E1202 19:00:12.040491 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17828c32-070b-40f2-a6f8-c86e4582cee0" containerName="sg-core" Dec 02 19:00:12 crc kubenswrapper[4792]: I1202 19:00:12.040500 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="17828c32-070b-40f2-a6f8-c86e4582cee0" 
containerName="sg-core" Dec 02 19:00:12 crc kubenswrapper[4792]: E1202 19:00:12.040781 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17828c32-070b-40f2-a6f8-c86e4582cee0" containerName="proxy-httpd" Dec 02 19:00:12 crc kubenswrapper[4792]: I1202 19:00:12.040798 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="17828c32-070b-40f2-a6f8-c86e4582cee0" containerName="proxy-httpd" Dec 02 19:00:12 crc kubenswrapper[4792]: E1202 19:00:12.040818 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17828c32-070b-40f2-a6f8-c86e4582cee0" containerName="ceilometer-notification-agent" Dec 02 19:00:12 crc kubenswrapper[4792]: I1202 19:00:12.040826 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="17828c32-070b-40f2-a6f8-c86e4582cee0" containerName="ceilometer-notification-agent" Dec 02 19:00:12 crc kubenswrapper[4792]: I1202 19:00:12.041098 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="17828c32-070b-40f2-a6f8-c86e4582cee0" containerName="ceilometer-notification-agent" Dec 02 19:00:12 crc kubenswrapper[4792]: I1202 19:00:12.041117 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="17828c32-070b-40f2-a6f8-c86e4582cee0" containerName="ceilometer-central-agent" Dec 02 19:00:12 crc kubenswrapper[4792]: I1202 19:00:12.041128 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="17828c32-070b-40f2-a6f8-c86e4582cee0" containerName="sg-core" Dec 02 19:00:12 crc kubenswrapper[4792]: I1202 19:00:12.041149 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="17828c32-070b-40f2-a6f8-c86e4582cee0" containerName="proxy-httpd" Dec 02 19:00:12 crc kubenswrapper[4792]: I1202 19:00:12.042985 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 19:00:12 crc kubenswrapper[4792]: I1202 19:00:12.045603 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 19:00:12 crc kubenswrapper[4792]: I1202 19:00:12.048014 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 02 19:00:12 crc kubenswrapper[4792]: I1202 19:00:12.048053 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 02 19:00:12 crc kubenswrapper[4792]: I1202 19:00:12.065397 4792 scope.go:117] "RemoveContainer" containerID="a3984d00e2bdd161932a020ba55c218065e4a3aedfeaaeb936f7358154b4abb9" Dec 02 19:00:12 crc kubenswrapper[4792]: E1202 19:00:12.065962 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a3984d00e2bdd161932a020ba55c218065e4a3aedfeaaeb936f7358154b4abb9\": container with ID starting with a3984d00e2bdd161932a020ba55c218065e4a3aedfeaaeb936f7358154b4abb9 not found: ID does not exist" containerID="a3984d00e2bdd161932a020ba55c218065e4a3aedfeaaeb936f7358154b4abb9" Dec 02 19:00:12 crc kubenswrapper[4792]: I1202 19:00:12.065997 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3984d00e2bdd161932a020ba55c218065e4a3aedfeaaeb936f7358154b4abb9"} err="failed to get container status \"a3984d00e2bdd161932a020ba55c218065e4a3aedfeaaeb936f7358154b4abb9\": rpc error: code = NotFound desc = could not find container \"a3984d00e2bdd161932a020ba55c218065e4a3aedfeaaeb936f7358154b4abb9\": container with ID starting with a3984d00e2bdd161932a020ba55c218065e4a3aedfeaaeb936f7358154b4abb9 not found: ID does not exist" Dec 02 
19:00:12 crc kubenswrapper[4792]: I1202 19:00:12.066023 4792 scope.go:117] "RemoveContainer" containerID="79bc45c4acd0c6882826ae3dd2456dc6be642cd97e7ef6e1c1136fe97cf536fe" Dec 02 19:00:12 crc kubenswrapper[4792]: E1202 19:00:12.066225 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"79bc45c4acd0c6882826ae3dd2456dc6be642cd97e7ef6e1c1136fe97cf536fe\": container with ID starting with 79bc45c4acd0c6882826ae3dd2456dc6be642cd97e7ef6e1c1136fe97cf536fe not found: ID does not exist" containerID="79bc45c4acd0c6882826ae3dd2456dc6be642cd97e7ef6e1c1136fe97cf536fe" Dec 02 19:00:12 crc kubenswrapper[4792]: I1202 19:00:12.066254 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79bc45c4acd0c6882826ae3dd2456dc6be642cd97e7ef6e1c1136fe97cf536fe"} err="failed to get container status \"79bc45c4acd0c6882826ae3dd2456dc6be642cd97e7ef6e1c1136fe97cf536fe\": rpc error: code = NotFound desc = could not find container \"79bc45c4acd0c6882826ae3dd2456dc6be642cd97e7ef6e1c1136fe97cf536fe\": container with ID starting with 79bc45c4acd0c6882826ae3dd2456dc6be642cd97e7ef6e1c1136fe97cf536fe not found: ID does not exist" Dec 02 19:00:12 crc kubenswrapper[4792]: I1202 19:00:12.066272 4792 scope.go:117] "RemoveContainer" containerID="a5238bdab8364f7f858e89f42ebfcc6617b4cae696b5f7161689e9acf5855ced" Dec 02 19:00:12 crc kubenswrapper[4792]: E1202 19:00:12.066492 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a5238bdab8364f7f858e89f42ebfcc6617b4cae696b5f7161689e9acf5855ced\": container with ID starting with a5238bdab8364f7f858e89f42ebfcc6617b4cae696b5f7161689e9acf5855ced not found: ID does not exist" containerID="a5238bdab8364f7f858e89f42ebfcc6617b4cae696b5f7161689e9acf5855ced" Dec 02 19:00:12 crc kubenswrapper[4792]: I1202 19:00:12.066535 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5238bdab8364f7f858e89f42ebfcc6617b4cae696b5f7161689e9acf5855ced"} err="failed to get container status \"a5238bdab8364f7f858e89f42ebfcc6617b4cae696b5f7161689e9acf5855ced\": rpc error: code = NotFound desc = could not find container \"a5238bdab8364f7f858e89f42ebfcc6617b4cae696b5f7161689e9acf5855ced\": container with ID starting with a5238bdab8364f7f858e89f42ebfcc6617b4cae696b5f7161689e9acf5855ced not found: ID does not exist" Dec 02 19:00:12 crc kubenswrapper[4792]: I1202 19:00:12.066553 4792 scope.go:117] "RemoveContainer" containerID="9faf00a3754bbfe0a4fa067155c117a76747232159701546ccf05b4b72775f99" Dec 02 19:00:12 crc kubenswrapper[4792]: E1202 19:00:12.066779 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9faf00a3754bbfe0a4fa067155c117a76747232159701546ccf05b4b72775f99\": container with ID starting with 9faf00a3754bbfe0a4fa067155c117a76747232159701546ccf05b4b72775f99 not found: ID does not exist" containerID="9faf00a3754bbfe0a4fa067155c117a76747232159701546ccf05b4b72775f99" Dec 02 19:00:12 crc kubenswrapper[4792]: I1202 19:00:12.066806 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9faf00a3754bbfe0a4fa067155c117a76747232159701546ccf05b4b72775f99"} err="failed to get container status \"9faf00a3754bbfe0a4fa067155c117a76747232159701546ccf05b4b72775f99\": rpc error: code = NotFound desc = could not find container 
\"9faf00a3754bbfe0a4fa067155c117a76747232159701546ccf05b4b72775f99\": container with ID starting with 9faf00a3754bbfe0a4fa067155c117a76747232159701546ccf05b4b72775f99 not found: ID does not exist" Dec 02 19:00:12 crc kubenswrapper[4792]: I1202 19:00:12.139740 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3e5f8bd9-4288-4f1b-9af5-aee6e9236011-log-httpd\") pod \"ceilometer-0\" (UID: \"3e5f8bd9-4288-4f1b-9af5-aee6e9236011\") " pod="openstack/ceilometer-0" Dec 02 19:00:12 crc kubenswrapper[4792]: I1202 19:00:12.140052 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3e5f8bd9-4288-4f1b-9af5-aee6e9236011-run-httpd\") pod \"ceilometer-0\" (UID: \"3e5f8bd9-4288-4f1b-9af5-aee6e9236011\") " pod="openstack/ceilometer-0" Dec 02 19:00:12 crc kubenswrapper[4792]: I1202 19:00:12.140220 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e5f8bd9-4288-4f1b-9af5-aee6e9236011-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3e5f8bd9-4288-4f1b-9af5-aee6e9236011\") " pod="openstack/ceilometer-0" Dec 02 19:00:12 crc kubenswrapper[4792]: I1202 19:00:12.140277 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e5f8bd9-4288-4f1b-9af5-aee6e9236011-config-data\") pod \"ceilometer-0\" (UID: \"3e5f8bd9-4288-4f1b-9af5-aee6e9236011\") " pod="openstack/ceilometer-0" Dec 02 19:00:12 crc kubenswrapper[4792]: I1202 19:00:12.140530 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3e5f8bd9-4288-4f1b-9af5-aee6e9236011-scripts\") pod \"ceilometer-0\" (UID: \"3e5f8bd9-4288-4f1b-9af5-aee6e9236011\") " pod="openstack/ceilometer-0" Dec 02 19:00:12 crc kubenswrapper[4792]: I1202 19:00:12.140585 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sgn9p\" (UniqueName: \"kubernetes.io/projected/3e5f8bd9-4288-4f1b-9af5-aee6e9236011-kube-api-access-sgn9p\") pod \"ceilometer-0\" (UID: \"3e5f8bd9-4288-4f1b-9af5-aee6e9236011\") " pod="openstack/ceilometer-0" Dec 02 19:00:12 crc kubenswrapper[4792]: I1202 19:00:12.140747 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3e5f8bd9-4288-4f1b-9af5-aee6e9236011-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3e5f8bd9-4288-4f1b-9af5-aee6e9236011\") " pod="openstack/ceilometer-0" Dec 02 19:00:12 crc kubenswrapper[4792]: I1202 19:00:12.242812 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3e5f8bd9-4288-4f1b-9af5-aee6e9236011-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3e5f8bd9-4288-4f1b-9af5-aee6e9236011\") " pod="openstack/ceilometer-0" Dec 02 19:00:12 crc kubenswrapper[4792]: I1202 19:00:12.242888 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3e5f8bd9-4288-4f1b-9af5-aee6e9236011-log-httpd\") pod \"ceilometer-0\" (UID: \"3e5f8bd9-4288-4f1b-9af5-aee6e9236011\") " pod="openstack/ceilometer-0" Dec 02 19:00:12 crc kubenswrapper[4792]: 
I1202 19:00:12.242917 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3e5f8bd9-4288-4f1b-9af5-aee6e9236011-run-httpd\") pod \"ceilometer-0\" (UID: \"3e5f8bd9-4288-4f1b-9af5-aee6e9236011\") " pod="openstack/ceilometer-0" Dec 02 19:00:12 crc kubenswrapper[4792]: I1202 19:00:12.242956 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e5f8bd9-4288-4f1b-9af5-aee6e9236011-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3e5f8bd9-4288-4f1b-9af5-aee6e9236011\") " pod="openstack/ceilometer-0" Dec 02 19:00:12 crc kubenswrapper[4792]: I1202 19:00:12.242976 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e5f8bd9-4288-4f1b-9af5-aee6e9236011-config-data\") pod \"ceilometer-0\" (UID: \"3e5f8bd9-4288-4f1b-9af5-aee6e9236011\") " pod="openstack/ceilometer-0" Dec 02 19:00:12 crc kubenswrapper[4792]: I1202 19:00:12.243021 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3e5f8bd9-4288-4f1b-9af5-aee6e9236011-scripts\") pod \"ceilometer-0\" (UID: \"3e5f8bd9-4288-4f1b-9af5-aee6e9236011\") " pod="openstack/ceilometer-0" Dec 02 19:00:12 crc kubenswrapper[4792]: I1202 19:00:12.243037 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sgn9p\" (UniqueName: \"kubernetes.io/projected/3e5f8bd9-4288-4f1b-9af5-aee6e9236011-kube-api-access-sgn9p\") pod \"ceilometer-0\" (UID: \"3e5f8bd9-4288-4f1b-9af5-aee6e9236011\") " pod="openstack/ceilometer-0" Dec 02 19:00:12 crc kubenswrapper[4792]: I1202 19:00:12.243531 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3e5f8bd9-4288-4f1b-9af5-aee6e9236011-run-httpd\") pod \"ceilometer-0\" (UID: \"3e5f8bd9-4288-4f1b-9af5-aee6e9236011\") " pod="openstack/ceilometer-0" Dec 02 19:00:12 crc kubenswrapper[4792]: I1202 19:00:12.243558 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3e5f8bd9-4288-4f1b-9af5-aee6e9236011-log-httpd\") pod \"ceilometer-0\" (UID: \"3e5f8bd9-4288-4f1b-9af5-aee6e9236011\") " pod="openstack/ceilometer-0" Dec 02 19:00:12 crc kubenswrapper[4792]: I1202 19:00:12.247370 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3e5f8bd9-4288-4f1b-9af5-aee6e9236011-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3e5f8bd9-4288-4f1b-9af5-aee6e9236011\") " pod="openstack/ceilometer-0" Dec 02 19:00:12 crc kubenswrapper[4792]: I1202 19:00:12.247627 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e5f8bd9-4288-4f1b-9af5-aee6e9236011-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3e5f8bd9-4288-4f1b-9af5-aee6e9236011\") " pod="openstack/ceilometer-0" Dec 02 19:00:12 crc kubenswrapper[4792]: I1202 19:00:12.248720 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3e5f8bd9-4288-4f1b-9af5-aee6e9236011-scripts\") pod \"ceilometer-0\" (UID: \"3e5f8bd9-4288-4f1b-9af5-aee6e9236011\") " pod="openstack/ceilometer-0" Dec 02 19:00:12 crc kubenswrapper[4792]: I1202 19:00:12.265872 4792 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e5f8bd9-4288-4f1b-9af5-aee6e9236011-config-data\") pod \"ceilometer-0\" (UID: \"3e5f8bd9-4288-4f1b-9af5-aee6e9236011\") " pod="openstack/ceilometer-0" Dec 02 19:00:12 crc kubenswrapper[4792]: I1202 19:00:12.275334 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sgn9p\" (UniqueName: \"kubernetes.io/projected/3e5f8bd9-4288-4f1b-9af5-aee6e9236011-kube-api-access-sgn9p\") pod \"ceilometer-0\" (UID: \"3e5f8bd9-4288-4f1b-9af5-aee6e9236011\") " pod="openstack/ceilometer-0" Dec 02 19:00:12 crc kubenswrapper[4792]: I1202 19:00:12.359804 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 19:00:13 crc kubenswrapper[4792]: I1202 19:00:13.278125 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 19:00:13 crc kubenswrapper[4792]: I1202 19:00:13.549321 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="17828c32-070b-40f2-a6f8-c86e4582cee0" path="/var/lib/kubelet/pods/17828c32-070b-40f2-a6f8-c86e4582cee0/volumes" Dec 02 19:00:13 crc kubenswrapper[4792]: I1202 19:00:13.970736 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3e5f8bd9-4288-4f1b-9af5-aee6e9236011","Type":"ContainerStarted","Data":"a3313d463969b31df8605a6d07db02a35a918d4eba57257fb921e8313dfbb57a"} Dec 02 19:00:13 crc kubenswrapper[4792]: I1202 19:00:13.971081 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3e5f8bd9-4288-4f1b-9af5-aee6e9236011","Type":"ContainerStarted","Data":"8e25af628c01feef64ff3b37b4fff1f72daf154ba26d63389756e75747b590df"} Dec 02 19:00:15 crc kubenswrapper[4792]: I1202 19:00:15.993814 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3e5f8bd9-4288-4f1b-9af5-aee6e9236011","Type":"ContainerStarted","Data":"5a9b1582e15bb65792e7df9a181726aa3fb9f3f8acb6aff76c21e16bc6d385df"} Dec 02 19:00:17 crc kubenswrapper[4792]: I1202 19:00:17.017810 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3e5f8bd9-4288-4f1b-9af5-aee6e9236011","Type":"ContainerStarted","Data":"db82f0bb76c64bdde4546c248e7fc8193594c10420a57d4c7770ec959341fb04"} Dec 02 19:00:18 crc kubenswrapper[4792]: I1202 19:00:18.028419 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3e5f8bd9-4288-4f1b-9af5-aee6e9236011","Type":"ContainerStarted","Data":"cf37d2c6f1947ee99004a85750d08ab4604a9271fadad530a025b430987e144c"} Dec 02 19:00:18 crc kubenswrapper[4792]: I1202 19:00:18.029003 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 02 19:00:18 crc kubenswrapper[4792]: I1202 19:00:18.064582 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.890374806 podStartE2EDuration="7.064562829s" podCreationTimestamp="2025-12-02 19:00:11 +0000 UTC" firstStartedPulling="2025-12-02 19:00:13.285211876 +0000 UTC m=+1444.058104204" lastFinishedPulling="2025-12-02 19:00:17.459399899 +0000 UTC m=+1448.232292227" observedRunningTime="2025-12-02 19:00:18.056436108 +0000 UTC m=+1448.829328436" watchObservedRunningTime="2025-12-02 19:00:18.064562829 +0000 UTC m=+1448.837455157" Dec 02 19:00:19 crc kubenswrapper[4792]: I1202 19:00:19.351514 4792 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Dec 02 19:00:19 crc kubenswrapper[4792]: I1202 19:00:19.817676 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-428v5"] Dec 02 19:00:19 crc kubenswrapper[4792]: I1202 19:00:19.819373 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-428v5" Dec 02 19:00:19 crc kubenswrapper[4792]: I1202 19:00:19.833389 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Dec 02 19:00:19 crc kubenswrapper[4792]: I1202 19:00:19.835472 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Dec 02 19:00:19 crc kubenswrapper[4792]: I1202 19:00:19.840279 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-428v5"] Dec 02 19:00:19 crc kubenswrapper[4792]: I1202 19:00:19.940697 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/82858e4f-bc42-4839-8d38-9f7f1772a089-config-data\") pod \"nova-cell0-cell-mapping-428v5\" (UID: \"82858e4f-bc42-4839-8d38-9f7f1772a089\") " pod="openstack/nova-cell0-cell-mapping-428v5" Dec 02 19:00:19 crc kubenswrapper[4792]: I1202 19:00:19.940813 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s26rc\" (UniqueName: \"kubernetes.io/projected/82858e4f-bc42-4839-8d38-9f7f1772a089-kube-api-access-s26rc\") pod \"nova-cell0-cell-mapping-428v5\" (UID: \"82858e4f-bc42-4839-8d38-9f7f1772a089\") " pod="openstack/nova-cell0-cell-mapping-428v5" Dec 02 19:00:19 crc kubenswrapper[4792]: I1202 19:00:19.940878 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/82858e4f-bc42-4839-8d38-9f7f1772a089-scripts\") pod \"nova-cell0-cell-mapping-428v5\" (UID: \"82858e4f-bc42-4839-8d38-9f7f1772a089\") " pod="openstack/nova-cell0-cell-mapping-428v5" Dec 02 19:00:19 crc kubenswrapper[4792]: I1202 19:00:19.940963 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/82858e4f-bc42-4839-8d38-9f7f1772a089-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-428v5\" (UID: \"82858e4f-bc42-4839-8d38-9f7f1772a089\") " pod="openstack/nova-cell0-cell-mapping-428v5" Dec 02 19:00:19 crc kubenswrapper[4792]: I1202 19:00:19.988648 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 02 19:00:19 crc kubenswrapper[4792]: I1202 19:00:19.989984 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 02 19:00:19 crc kubenswrapper[4792]: I1202 19:00:19.995205 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.042221 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s26rc\" (UniqueName: \"kubernetes.io/projected/82858e4f-bc42-4839-8d38-9f7f1772a089-kube-api-access-s26rc\") pod \"nova-cell0-cell-mapping-428v5\" (UID: \"82858e4f-bc42-4839-8d38-9f7f1772a089\") " pod="openstack/nova-cell0-cell-mapping-428v5" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.042291 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvhvg\" (UniqueName: \"kubernetes.io/projected/9c321714-c201-4d1e-a0f9-d2fe24983c28-kube-api-access-jvhvg\") pod \"nova-cell1-novncproxy-0\" (UID: \"9c321714-c201-4d1e-a0f9-d2fe24983c28\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.042319 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c321714-c201-4d1e-a0f9-d2fe24983c28-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"9c321714-c201-4d1e-a0f9-d2fe24983c28\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.042355 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/82858e4f-bc42-4839-8d38-9f7f1772a089-scripts\") pod \"nova-cell0-cell-mapping-428v5\" (UID: \"82858e4f-bc42-4839-8d38-9f7f1772a089\") " pod="openstack/nova-cell0-cell-mapping-428v5" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.042419 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/82858e4f-bc42-4839-8d38-9f7f1772a089-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-428v5\" (UID: \"82858e4f-bc42-4839-8d38-9f7f1772a089\") " pod="openstack/nova-cell0-cell-mapping-428v5" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.042467 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/82858e4f-bc42-4839-8d38-9f7f1772a089-config-data\") pod \"nova-cell0-cell-mapping-428v5\" (UID: \"82858e4f-bc42-4839-8d38-9f7f1772a089\") " pod="openstack/nova-cell0-cell-mapping-428v5" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.042498 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c321714-c201-4d1e-a0f9-d2fe24983c28-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"9c321714-c201-4d1e-a0f9-d2fe24983c28\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.051286 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/82858e4f-bc42-4839-8d38-9f7f1772a089-scripts\") pod \"nova-cell0-cell-mapping-428v5\" (UID: \"82858e4f-bc42-4839-8d38-9f7f1772a089\") " pod="openstack/nova-cell0-cell-mapping-428v5" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.051763 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/82858e4f-bc42-4839-8d38-9f7f1772a089-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-428v5\" (UID: \"82858e4f-bc42-4839-8d38-9f7f1772a089\") " pod="openstack/nova-cell0-cell-mapping-428v5" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.058743 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/82858e4f-bc42-4839-8d38-9f7f1772a089-config-data\") pod \"nova-cell0-cell-mapping-428v5\" (UID: \"82858e4f-bc42-4839-8d38-9f7f1772a089\") " pod="openstack/nova-cell0-cell-mapping-428v5" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.062367 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.064032 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.075827 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.096456 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s26rc\" (UniqueName: \"kubernetes.io/projected/82858e4f-bc42-4839-8d38-9f7f1772a089-kube-api-access-s26rc\") pod \"nova-cell0-cell-mapping-428v5\" (UID: \"82858e4f-bc42-4839-8d38-9f7f1772a089\") " pod="openstack/nova-cell0-cell-mapping-428v5" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.102998 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.127045 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.140333 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-428v5" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.143845 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvhvg\" (UniqueName: \"kubernetes.io/projected/9c321714-c201-4d1e-a0f9-d2fe24983c28-kube-api-access-jvhvg\") pod \"nova-cell1-novncproxy-0\" (UID: \"9c321714-c201-4d1e-a0f9-d2fe24983c28\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.143880 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c321714-c201-4d1e-a0f9-d2fe24983c28-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"9c321714-c201-4d1e-a0f9-d2fe24983c28\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.143916 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/39f28fd8-7806-46cb-b9cc-1be441cf71ef-config-data\") pod \"nova-metadata-0\" (UID: \"39f28fd8-7806-46cb-b9cc-1be441cf71ef\") " pod="openstack/nova-metadata-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.143973 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39f28fd8-7806-46cb-b9cc-1be441cf71ef-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"39f28fd8-7806-46cb-b9cc-1be441cf71ef\") " pod="openstack/nova-metadata-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.144018 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zsqkn\" (UniqueName: \"kubernetes.io/projected/39f28fd8-7806-46cb-b9cc-1be441cf71ef-kube-api-access-zsqkn\") pod \"nova-metadata-0\" (UID: \"39f28fd8-7806-46cb-b9cc-1be441cf71ef\") " pod="openstack/nova-metadata-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.144043 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/39f28fd8-7806-46cb-b9cc-1be441cf71ef-logs\") pod \"nova-metadata-0\" (UID: \"39f28fd8-7806-46cb-b9cc-1be441cf71ef\") " pod="openstack/nova-metadata-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.144071 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c321714-c201-4d1e-a0f9-d2fe24983c28-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"9c321714-c201-4d1e-a0f9-d2fe24983c28\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.155563 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c321714-c201-4d1e-a0f9-d2fe24983c28-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"9c321714-c201-4d1e-a0f9-d2fe24983c28\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.177444 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c321714-c201-4d1e-a0f9-d2fe24983c28-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"9c321714-c201-4d1e-a0f9-d2fe24983c28\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.196442 4792 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvhvg\" (UniqueName: \"kubernetes.io/projected/9c321714-c201-4d1e-a0f9-d2fe24983c28-kube-api-access-jvhvg\") pod \"nova-cell1-novncproxy-0\" (UID: \"9c321714-c201-4d1e-a0f9-d2fe24983c28\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.269984 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/39f28fd8-7806-46cb-b9cc-1be441cf71ef-config-data\") pod \"nova-metadata-0\" (UID: \"39f28fd8-7806-46cb-b9cc-1be441cf71ef\") " pod="openstack/nova-metadata-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.270164 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39f28fd8-7806-46cb-b9cc-1be441cf71ef-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"39f28fd8-7806-46cb-b9cc-1be441cf71ef\") " pod="openstack/nova-metadata-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.270285 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zsqkn\" (UniqueName: \"kubernetes.io/projected/39f28fd8-7806-46cb-b9cc-1be441cf71ef-kube-api-access-zsqkn\") pod \"nova-metadata-0\" (UID: \"39f28fd8-7806-46cb-b9cc-1be441cf71ef\") " pod="openstack/nova-metadata-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.270332 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/39f28fd8-7806-46cb-b9cc-1be441cf71ef-logs\") pod \"nova-metadata-0\" (UID: \"39f28fd8-7806-46cb-b9cc-1be441cf71ef\") " pod="openstack/nova-metadata-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.271065 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/39f28fd8-7806-46cb-b9cc-1be441cf71ef-logs\") pod \"nova-metadata-0\" (UID: \"39f28fd8-7806-46cb-b9cc-1be441cf71ef\") " pod="openstack/nova-metadata-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.276993 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39f28fd8-7806-46cb-b9cc-1be441cf71ef-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"39f28fd8-7806-46cb-b9cc-1be441cf71ef\") " pod="openstack/nova-metadata-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.291656 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.293952 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.299222 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zsqkn\" (UniqueName: \"kubernetes.io/projected/39f28fd8-7806-46cb-b9cc-1be441cf71ef-kube-api-access-zsqkn\") pod \"nova-metadata-0\" (UID: \"39f28fd8-7806-46cb-b9cc-1be441cf71ef\") " pod="openstack/nova-metadata-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.300014 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.312411 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.322158 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/39f28fd8-7806-46cb-b9cc-1be441cf71ef-config-data\") pod \"nova-metadata-0\" (UID: \"39f28fd8-7806-46cb-b9cc-1be441cf71ef\") " pod="openstack/nova-metadata-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.336431 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.371043 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.371433 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f41d9f2a-9c86-4a7c-95d3-bcf1349690e6-config-data\") pod \"nova-scheduler-0\" (UID: \"f41d9f2a-9c86-4a7c-95d3-bcf1349690e6\") " pod="openstack/nova-scheduler-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.372309 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wt7rj\" (UniqueName: \"kubernetes.io/projected/f41d9f2a-9c86-4a7c-95d3-bcf1349690e6-kube-api-access-wt7rj\") pod \"nova-scheduler-0\" (UID: \"f41d9f2a-9c86-4a7c-95d3-bcf1349690e6\") " pod="openstack/nova-scheduler-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.374043 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f41d9f2a-9c86-4a7c-95d3-bcf1349690e6-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f41d9f2a-9c86-4a7c-95d3-bcf1349690e6\") " pod="openstack/nova-scheduler-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.376871 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.378071 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.388685 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.395638 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78cd565959-lftp9"] Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.398474 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78cd565959-lftp9" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.410610 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.426603 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78cd565959-lftp9"] Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.482956 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgx8h\" (UniqueName: \"kubernetes.io/projected/6194d48a-051d-4700-ab26-11280eb387f3-kube-api-access-rgx8h\") pod \"nova-api-0\" (UID: \"6194d48a-051d-4700-ab26-11280eb387f3\") " pod="openstack/nova-api-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.483004 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/92187bf7-8ae3-4a53-9e20-f1d31f44f0d0-ovsdbserver-sb\") pod \"dnsmasq-dns-78cd565959-lftp9\" (UID: \"92187bf7-8ae3-4a53-9e20-f1d31f44f0d0\") " pod="openstack/dnsmasq-dns-78cd565959-lftp9" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.483119 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f41d9f2a-9c86-4a7c-95d3-bcf1349690e6-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f41d9f2a-9c86-4a7c-95d3-bcf1349690e6\") " pod="openstack/nova-scheduler-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.483137 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6194d48a-051d-4700-ab26-11280eb387f3-logs\") pod \"nova-api-0\" (UID: \"6194d48a-051d-4700-ab26-11280eb387f3\") " pod="openstack/nova-api-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.483200 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6194d48a-051d-4700-ab26-11280eb387f3-config-data\") pod \"nova-api-0\" (UID: \"6194d48a-051d-4700-ab26-11280eb387f3\") " pod="openstack/nova-api-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.483227 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/92187bf7-8ae3-4a53-9e20-f1d31f44f0d0-ovsdbserver-nb\") pod \"dnsmasq-dns-78cd565959-lftp9\" (UID: \"92187bf7-8ae3-4a53-9e20-f1d31f44f0d0\") " pod="openstack/dnsmasq-dns-78cd565959-lftp9" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.483264 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6194d48a-051d-4700-ab26-11280eb387f3-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6194d48a-051d-4700-ab26-11280eb387f3\") " pod="openstack/nova-api-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.483296 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hzphp\" (UniqueName: \"kubernetes.io/projected/92187bf7-8ae3-4a53-9e20-f1d31f44f0d0-kube-api-access-hzphp\") pod \"dnsmasq-dns-78cd565959-lftp9\" (UID: \"92187bf7-8ae3-4a53-9e20-f1d31f44f0d0\") " pod="openstack/dnsmasq-dns-78cd565959-lftp9" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.483319 4792 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f41d9f2a-9c86-4a7c-95d3-bcf1349690e6-config-data\") pod \"nova-scheduler-0\" (UID: \"f41d9f2a-9c86-4a7c-95d3-bcf1349690e6\") " pod="openstack/nova-scheduler-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.483338 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92187bf7-8ae3-4a53-9e20-f1d31f44f0d0-config\") pod \"dnsmasq-dns-78cd565959-lftp9\" (UID: \"92187bf7-8ae3-4a53-9e20-f1d31f44f0d0\") " pod="openstack/dnsmasq-dns-78cd565959-lftp9" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.483384 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wt7rj\" (UniqueName: \"kubernetes.io/projected/f41d9f2a-9c86-4a7c-95d3-bcf1349690e6-kube-api-access-wt7rj\") pod \"nova-scheduler-0\" (UID: \"f41d9f2a-9c86-4a7c-95d3-bcf1349690e6\") " pod="openstack/nova-scheduler-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.483438 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/92187bf7-8ae3-4a53-9e20-f1d31f44f0d0-dns-svc\") pod \"dnsmasq-dns-78cd565959-lftp9\" (UID: \"92187bf7-8ae3-4a53-9e20-f1d31f44f0d0\") " pod="openstack/dnsmasq-dns-78cd565959-lftp9" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.483471 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/92187bf7-8ae3-4a53-9e20-f1d31f44f0d0-dns-swift-storage-0\") pod \"dnsmasq-dns-78cd565959-lftp9\" (UID: \"92187bf7-8ae3-4a53-9e20-f1d31f44f0d0\") " pod="openstack/dnsmasq-dns-78cd565959-lftp9" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.492466 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f41d9f2a-9c86-4a7c-95d3-bcf1349690e6-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f41d9f2a-9c86-4a7c-95d3-bcf1349690e6\") " pod="openstack/nova-scheduler-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.510069 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wt7rj\" (UniqueName: \"kubernetes.io/projected/f41d9f2a-9c86-4a7c-95d3-bcf1349690e6-kube-api-access-wt7rj\") pod \"nova-scheduler-0\" (UID: \"f41d9f2a-9c86-4a7c-95d3-bcf1349690e6\") " pod="openstack/nova-scheduler-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.514286 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f41d9f2a-9c86-4a7c-95d3-bcf1349690e6-config-data\") pod \"nova-scheduler-0\" (UID: \"f41d9f2a-9c86-4a7c-95d3-bcf1349690e6\") " pod="openstack/nova-scheduler-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.590244 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/92187bf7-8ae3-4a53-9e20-f1d31f44f0d0-dns-svc\") pod \"dnsmasq-dns-78cd565959-lftp9\" (UID: \"92187bf7-8ae3-4a53-9e20-f1d31f44f0d0\") " pod="openstack/dnsmasq-dns-78cd565959-lftp9" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.590286 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: 
\"kubernetes.io/configmap/92187bf7-8ae3-4a53-9e20-f1d31f44f0d0-dns-swift-storage-0\") pod \"dnsmasq-dns-78cd565959-lftp9\" (UID: \"92187bf7-8ae3-4a53-9e20-f1d31f44f0d0\") " pod="openstack/dnsmasq-dns-78cd565959-lftp9" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.590356 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgx8h\" (UniqueName: \"kubernetes.io/projected/6194d48a-051d-4700-ab26-11280eb387f3-kube-api-access-rgx8h\") pod \"nova-api-0\" (UID: \"6194d48a-051d-4700-ab26-11280eb387f3\") " pod="openstack/nova-api-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.590384 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/92187bf7-8ae3-4a53-9e20-f1d31f44f0d0-ovsdbserver-sb\") pod \"dnsmasq-dns-78cd565959-lftp9\" (UID: \"92187bf7-8ae3-4a53-9e20-f1d31f44f0d0\") " pod="openstack/dnsmasq-dns-78cd565959-lftp9" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.590445 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6194d48a-051d-4700-ab26-11280eb387f3-logs\") pod \"nova-api-0\" (UID: \"6194d48a-051d-4700-ab26-11280eb387f3\") " pod="openstack/nova-api-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.590485 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6194d48a-051d-4700-ab26-11280eb387f3-config-data\") pod \"nova-api-0\" (UID: \"6194d48a-051d-4700-ab26-11280eb387f3\") " pod="openstack/nova-api-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.590506 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/92187bf7-8ae3-4a53-9e20-f1d31f44f0d0-ovsdbserver-nb\") pod \"dnsmasq-dns-78cd565959-lftp9\" (UID: \"92187bf7-8ae3-4a53-9e20-f1d31f44f0d0\") " pod="openstack/dnsmasq-dns-78cd565959-lftp9" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.590553 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6194d48a-051d-4700-ab26-11280eb387f3-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6194d48a-051d-4700-ab26-11280eb387f3\") " pod="openstack/nova-api-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.590575 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hzphp\" (UniqueName: \"kubernetes.io/projected/92187bf7-8ae3-4a53-9e20-f1d31f44f0d0-kube-api-access-hzphp\") pod \"dnsmasq-dns-78cd565959-lftp9\" (UID: \"92187bf7-8ae3-4a53-9e20-f1d31f44f0d0\") " pod="openstack/dnsmasq-dns-78cd565959-lftp9" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.590593 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92187bf7-8ae3-4a53-9e20-f1d31f44f0d0-config\") pod \"dnsmasq-dns-78cd565959-lftp9\" (UID: \"92187bf7-8ae3-4a53-9e20-f1d31f44f0d0\") " pod="openstack/dnsmasq-dns-78cd565959-lftp9" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.592839 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6194d48a-051d-4700-ab26-11280eb387f3-logs\") pod \"nova-api-0\" (UID: \"6194d48a-051d-4700-ab26-11280eb387f3\") " pod="openstack/nova-api-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 
19:00:20.593767 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/92187bf7-8ae3-4a53-9e20-f1d31f44f0d0-dns-svc\") pod \"dnsmasq-dns-78cd565959-lftp9\" (UID: \"92187bf7-8ae3-4a53-9e20-f1d31f44f0d0\") " pod="openstack/dnsmasq-dns-78cd565959-lftp9" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.595634 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/92187bf7-8ae3-4a53-9e20-f1d31f44f0d0-ovsdbserver-nb\") pod \"dnsmasq-dns-78cd565959-lftp9\" (UID: \"92187bf7-8ae3-4a53-9e20-f1d31f44f0d0\") " pod="openstack/dnsmasq-dns-78cd565959-lftp9" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.596645 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92187bf7-8ae3-4a53-9e20-f1d31f44f0d0-config\") pod \"dnsmasq-dns-78cd565959-lftp9\" (UID: \"92187bf7-8ae3-4a53-9e20-f1d31f44f0d0\") " pod="openstack/dnsmasq-dns-78cd565959-lftp9" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.597502 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/92187bf7-8ae3-4a53-9e20-f1d31f44f0d0-dns-swift-storage-0\") pod \"dnsmasq-dns-78cd565959-lftp9\" (UID: \"92187bf7-8ae3-4a53-9e20-f1d31f44f0d0\") " pod="openstack/dnsmasq-dns-78cd565959-lftp9" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.598133 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/92187bf7-8ae3-4a53-9e20-f1d31f44f0d0-ovsdbserver-sb\") pod \"dnsmasq-dns-78cd565959-lftp9\" (UID: \"92187bf7-8ae3-4a53-9e20-f1d31f44f0d0\") " pod="openstack/dnsmasq-dns-78cd565959-lftp9" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.598158 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6194d48a-051d-4700-ab26-11280eb387f3-config-data\") pod \"nova-api-0\" (UID: \"6194d48a-051d-4700-ab26-11280eb387f3\") " pod="openstack/nova-api-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.599155 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6194d48a-051d-4700-ab26-11280eb387f3-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6194d48a-051d-4700-ab26-11280eb387f3\") " pod="openstack/nova-api-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.617141 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rgx8h\" (UniqueName: \"kubernetes.io/projected/6194d48a-051d-4700-ab26-11280eb387f3-kube-api-access-rgx8h\") pod \"nova-api-0\" (UID: \"6194d48a-051d-4700-ab26-11280eb387f3\") " pod="openstack/nova-api-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.622556 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hzphp\" (UniqueName: \"kubernetes.io/projected/92187bf7-8ae3-4a53-9e20-f1d31f44f0d0-kube-api-access-hzphp\") pod \"dnsmasq-dns-78cd565959-lftp9\" (UID: \"92187bf7-8ae3-4a53-9e20-f1d31f44f0d0\") " pod="openstack/dnsmasq-dns-78cd565959-lftp9" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.703628 4792 util.go:30] "No sandbox for pod can be found. 
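[Editor's note] The paired started/succeeded lines come from a desired-state/actual-state loop: reconciler_common.go compares what scheduled pods need against what is actually mounted, and hands asynchronous mount operations to operation_generator.go, which reports back with "MountVolume.SetUp succeeded". A deliberately simplified sketch of that shape, with illustrative names only (kubelet's real implementation lives in pkg/kubelet/volumemanager):

```go
// Toy desired-state/actual-state reconciler. Each tick, any volume that is
// desired but not yet mounted (and not already in flight) gets an async
// mount kicked off, mirroring the started -> succeeded pairs in the log.
package main

import (
	"fmt"
	"sync"
	"time"
)

type volumeKey string // stands in for the UniqueName seen in the log

type reconciler struct {
	mu      sync.Mutex
	desired map[volumeKey]bool // volumes the pods on this node need
	mounted map[volumeKey]bool // volumes whose SetUp has completed
	pending map[volumeKey]bool // operations already in flight
}

func (r *reconciler) tick() {
	r.mu.Lock()
	defer r.mu.Unlock()
	for v := range r.desired {
		if !r.mounted[v] && !r.pending[v] {
			r.pending[v] = true
			fmt.Printf("operationExecutor.MountVolume started for volume %q\n", v)
			go r.mount(v) // kubelet also runs these asynchronously
		}
	}
}

func (r *reconciler) mount(v volumeKey) {
	time.Sleep(10 * time.Millisecond) // pretend SetUp takes a moment
	r.mu.Lock()
	r.mounted[v], r.pending[v] = true, false
	r.mu.Unlock()
	fmt.Printf("MountVolume.SetUp succeeded for volume %q\n", v)
}

func main() {
	r := &reconciler{
		desired: map[volumeKey]bool{"kubernetes.io/secret/demo-config-data": true},
		mounted: map[volumeKey]bool{},
		pending: map[volumeKey]bool{},
	}
	for i := 0; i < 3; i++ {
		r.tick()
		time.Sleep(20 * time.Millisecond)
	}
}
```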
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.724509 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.730585 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78cd565959-lftp9" Dec 02 19:00:20 crc kubenswrapper[4792]: I1202 19:00:20.844980 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-428v5"] Dec 02 19:00:21 crc kubenswrapper[4792]: I1202 19:00:21.119056 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-428v5" event={"ID":"82858e4f-bc42-4839-8d38-9f7f1772a089","Type":"ContainerStarted","Data":"2c4559cddfde3fd29a01f0916f4bbd35b76fe4de2e96081657a65393a02a8113"} Dec 02 19:00:21 crc kubenswrapper[4792]: I1202 19:00:21.142265 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 19:00:21 crc kubenswrapper[4792]: I1202 19:00:21.296533 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 02 19:00:21 crc kubenswrapper[4792]: W1202 19:00:21.301226 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9c321714_c201_4d1e_a0f9_d2fe24983c28.slice/crio-1023b8a1ae65326d99e9812ffc1a5bc40e3d7206a451f4ec96f350dbf1fce119 WatchSource:0}: Error finding container 1023b8a1ae65326d99e9812ffc1a5bc40e3d7206a451f4ec96f350dbf1fce119: Status 404 returned error can't find the container with id 1023b8a1ae65326d99e9812ffc1a5bc40e3d7206a451f4ec96f350dbf1fce119 Dec 02 19:00:21 crc kubenswrapper[4792]: I1202 19:00:21.574298 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 19:00:21 crc kubenswrapper[4792]: I1202 19:00:21.648777 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 02 19:00:21 crc kubenswrapper[4792]: I1202 19:00:21.692138 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-glqwc"] Dec 02 19:00:21 crc kubenswrapper[4792]: I1202 19:00:21.693635 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-glqwc" Dec 02 19:00:21 crc kubenswrapper[4792]: I1202 19:00:21.696893 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Dec 02 19:00:21 crc kubenswrapper[4792]: I1202 19:00:21.697273 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 02 19:00:21 crc kubenswrapper[4792]: I1202 19:00:21.702654 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-glqwc"] Dec 02 19:00:21 crc kubenswrapper[4792]: I1202 19:00:21.810747 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78cd565959-lftp9"] Dec 02 19:00:21 crc kubenswrapper[4792]: W1202 19:00:21.819644 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod92187bf7_8ae3_4a53_9e20_f1d31f44f0d0.slice/crio-83a5343cccc538286f4eb6e56d5f40919a26f396f534e68ec8e991597dfd7065 WatchSource:0}: Error finding container 83a5343cccc538286f4eb6e56d5f40919a26f396f534e68ec8e991597dfd7065: Status 404 returned error can't find the container with id 83a5343cccc538286f4eb6e56d5f40919a26f396f534e68ec8e991597dfd7065 Dec 02 19:00:21 crc kubenswrapper[4792]: I1202 19:00:21.847852 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qjk8m\" (UniqueName: \"kubernetes.io/projected/67c9d25a-3961-4b7d-bad6-340311a55dde-kube-api-access-qjk8m\") pod \"nova-cell1-conductor-db-sync-glqwc\" (UID: \"67c9d25a-3961-4b7d-bad6-340311a55dde\") " pod="openstack/nova-cell1-conductor-db-sync-glqwc" Dec 02 19:00:21 crc kubenswrapper[4792]: I1202 19:00:21.848608 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67c9d25a-3961-4b7d-bad6-340311a55dde-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-glqwc\" (UID: \"67c9d25a-3961-4b7d-bad6-340311a55dde\") " pod="openstack/nova-cell1-conductor-db-sync-glqwc" Dec 02 19:00:21 crc kubenswrapper[4792]: I1202 19:00:21.848647 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67c9d25a-3961-4b7d-bad6-340311a55dde-config-data\") pod \"nova-cell1-conductor-db-sync-glqwc\" (UID: \"67c9d25a-3961-4b7d-bad6-340311a55dde\") " pod="openstack/nova-cell1-conductor-db-sync-glqwc" Dec 02 19:00:21 crc kubenswrapper[4792]: I1202 19:00:21.848745 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67c9d25a-3961-4b7d-bad6-340311a55dde-scripts\") pod \"nova-cell1-conductor-db-sync-glqwc\" (UID: \"67c9d25a-3961-4b7d-bad6-340311a55dde\") " pod="openstack/nova-cell1-conductor-db-sync-glqwc" Dec 02 19:00:21 crc kubenswrapper[4792]: I1202 19:00:21.951001 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qjk8m\" (UniqueName: \"kubernetes.io/projected/67c9d25a-3961-4b7d-bad6-340311a55dde-kube-api-access-qjk8m\") pod \"nova-cell1-conductor-db-sync-glqwc\" (UID: \"67c9d25a-3961-4b7d-bad6-340311a55dde\") " pod="openstack/nova-cell1-conductor-db-sync-glqwc" Dec 02 19:00:21 crc kubenswrapper[4792]: I1202 19:00:21.951072 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67c9d25a-3961-4b7d-bad6-340311a55dde-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-glqwc\" (UID: \"67c9d25a-3961-4b7d-bad6-340311a55dde\") " pod="openstack/nova-cell1-conductor-db-sync-glqwc" Dec 02 19:00:21 crc kubenswrapper[4792]: I1202 19:00:21.951104 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67c9d25a-3961-4b7d-bad6-340311a55dde-config-data\") pod \"nova-cell1-conductor-db-sync-glqwc\" (UID: \"67c9d25a-3961-4b7d-bad6-340311a55dde\") " pod="openstack/nova-cell1-conductor-db-sync-glqwc" Dec 02 19:00:21 crc kubenswrapper[4792]: I1202 19:00:21.951182 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67c9d25a-3961-4b7d-bad6-340311a55dde-scripts\") pod \"nova-cell1-conductor-db-sync-glqwc\" (UID: \"67c9d25a-3961-4b7d-bad6-340311a55dde\") " pod="openstack/nova-cell1-conductor-db-sync-glqwc" Dec 02 19:00:21 crc kubenswrapper[4792]: I1202 19:00:21.956674 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67c9d25a-3961-4b7d-bad6-340311a55dde-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-glqwc\" (UID: \"67c9d25a-3961-4b7d-bad6-340311a55dde\") " pod="openstack/nova-cell1-conductor-db-sync-glqwc" Dec 02 19:00:21 crc kubenswrapper[4792]: I1202 19:00:21.957349 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67c9d25a-3961-4b7d-bad6-340311a55dde-scripts\") pod \"nova-cell1-conductor-db-sync-glqwc\" (UID: \"67c9d25a-3961-4b7d-bad6-340311a55dde\") " pod="openstack/nova-cell1-conductor-db-sync-glqwc" Dec 02 19:00:21 crc kubenswrapper[4792]: I1202 19:00:21.965359 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67c9d25a-3961-4b7d-bad6-340311a55dde-config-data\") pod \"nova-cell1-conductor-db-sync-glqwc\" (UID: \"67c9d25a-3961-4b7d-bad6-340311a55dde\") " pod="openstack/nova-cell1-conductor-db-sync-glqwc" Dec 02 19:00:21 crc kubenswrapper[4792]: I1202 19:00:21.968035 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qjk8m\" (UniqueName: \"kubernetes.io/projected/67c9d25a-3961-4b7d-bad6-340311a55dde-kube-api-access-qjk8m\") pod \"nova-cell1-conductor-db-sync-glqwc\" (UID: \"67c9d25a-3961-4b7d-bad6-340311a55dde\") " pod="openstack/nova-cell1-conductor-db-sync-glqwc" Dec 02 19:00:22 crc kubenswrapper[4792]: I1202 19:00:22.021958 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-glqwc" Dec 02 19:00:22 crc kubenswrapper[4792]: I1202 19:00:22.150195 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"9c321714-c201-4d1e-a0f9-d2fe24983c28","Type":"ContainerStarted","Data":"1023b8a1ae65326d99e9812ffc1a5bc40e3d7206a451f4ec96f350dbf1fce119"} Dec 02 19:00:22 crc kubenswrapper[4792]: I1202 19:00:22.153250 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6194d48a-051d-4700-ab26-11280eb387f3","Type":"ContainerStarted","Data":"a5478053b7757aa402a7528b8186da4ba23ff40bae7dac562d743fe1bca6c749"} Dec 02 19:00:22 crc kubenswrapper[4792]: I1202 19:00:22.154634 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f41d9f2a-9c86-4a7c-95d3-bcf1349690e6","Type":"ContainerStarted","Data":"e400ad10f3e6b6f5dab242b9c970f7a1b8e4c581c7f8f22677676a8e836a345f"} Dec 02 19:00:22 crc kubenswrapper[4792]: I1202 19:00:22.156950 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78cd565959-lftp9" event={"ID":"92187bf7-8ae3-4a53-9e20-f1d31f44f0d0","Type":"ContainerStarted","Data":"83a5343cccc538286f4eb6e56d5f40919a26f396f534e68ec8e991597dfd7065"} Dec 02 19:00:22 crc kubenswrapper[4792]: I1202 19:00:22.166040 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-428v5" event={"ID":"82858e4f-bc42-4839-8d38-9f7f1772a089","Type":"ContainerStarted","Data":"0dc8f4ae80975827647fd7ba656d100c9c201b93d36d62e17511fbed9a2ebae4"} Dec 02 19:00:22 crc kubenswrapper[4792]: I1202 19:00:22.173051 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"39f28fd8-7806-46cb-b9cc-1be441cf71ef","Type":"ContainerStarted","Data":"7a8e52caa71c9bc3af3bdc19dd3992f44ad98e40121a239bdcf113a78a7ac2ed"} Dec 02 19:00:22 crc kubenswrapper[4792]: I1202 19:00:22.206886 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-428v5" podStartSLOduration=3.206868304 podStartE2EDuration="3.206868304s" podCreationTimestamp="2025-12-02 19:00:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 19:00:22.202169852 +0000 UTC m=+1452.975062180" watchObservedRunningTime="2025-12-02 19:00:22.206868304 +0000 UTC m=+1452.979760632" Dec 02 19:00:22 crc kubenswrapper[4792]: I1202 19:00:22.573547 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-glqwc"] Dec 02 19:00:23 crc kubenswrapper[4792]: I1202 19:00:23.197422 4792 generic.go:334] "Generic (PLEG): container finished" podID="92187bf7-8ae3-4a53-9e20-f1d31f44f0d0" containerID="6aa9c6b275819640b44ddb3cfd4e9a349ad08dc5ae9af788b165d4f86c1fba09" exitCode=0 Dec 02 19:00:23 crc kubenswrapper[4792]: I1202 19:00:23.197927 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78cd565959-lftp9" event={"ID":"92187bf7-8ae3-4a53-9e20-f1d31f44f0d0","Type":"ContainerDied","Data":"6aa9c6b275819640b44ddb3cfd4e9a349ad08dc5ae9af788b165d4f86c1fba09"} Dec 02 19:00:23 crc kubenswrapper[4792]: I1202 19:00:23.219560 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-glqwc" event={"ID":"67c9d25a-3961-4b7d-bad6-340311a55dde","Type":"ContainerStarted","Data":"59e7f042467af8b6a374137f3bdcac4f549bd4104b755cdb1be321401b509434"} Dec 
Dec 02 19:00:24 crc kubenswrapper[4792]: I1202 19:00:24.090100 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Dec 02 19:00:24 crc kubenswrapper[4792]: I1202 19:00:24.102495 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 02 19:00:26 crc kubenswrapper[4792]: I1202 19:00:26.255682 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"9c321714-c201-4d1e-a0f9-d2fe24983c28","Type":"ContainerStarted","Data":"fd4d3cd642d4547c4b053ff2b8c9a8a215451173821a6cd00f1a4de5b307e2ea"}
Dec 02 19:00:26 crc kubenswrapper[4792]: I1202 19:00:26.255852 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="9c321714-c201-4d1e-a0f9-d2fe24983c28" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://fd4d3cd642d4547c4b053ff2b8c9a8a215451173821a6cd00f1a4de5b307e2ea" gracePeriod=30
Dec 02 19:00:26 crc kubenswrapper[4792]: I1202 19:00:26.257789 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6194d48a-051d-4700-ab26-11280eb387f3","Type":"ContainerStarted","Data":"e3a308d998f1339d73d29d80c40d6fab0707ed42cbffb57a0bc75748a776e7ab"}
Dec 02 19:00:26 crc kubenswrapper[4792]: I1202 19:00:26.257813 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6194d48a-051d-4700-ab26-11280eb387f3","Type":"ContainerStarted","Data":"51c43252594109d84fc6199823d9e46620e01716bc34745d24ed9a01874945fc"}
Dec 02 19:00:26 crc kubenswrapper[4792]: I1202 19:00:26.260132 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-glqwc" event={"ID":"67c9d25a-3961-4b7d-bad6-340311a55dde","Type":"ContainerStarted","Data":"9dd237969616cdf7de0455b8afae5553788744c495490aaf1e021952c331f415"}
Dec 02 19:00:26 crc kubenswrapper[4792]: I1202 19:00:26.261805 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f41d9f2a-9c86-4a7c-95d3-bcf1349690e6","Type":"ContainerStarted","Data":"d8711fc32e54d6d5146c9b3146614173ebfbc41ceea3d991f309fdb82e867721"}
Dec 02 19:00:26 crc kubenswrapper[4792]: I1202 19:00:26.265211 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78cd565959-lftp9" event={"ID":"92187bf7-8ae3-4a53-9e20-f1d31f44f0d0","Type":"ContainerStarted","Data":"c033e5f4a3c35edec4a7596610fee866738e841d71ea312bd5fb797f51f7a2e0"}
Dec 02 19:00:26 crc kubenswrapper[4792]: I1202 19:00:26.265320 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-78cd565959-lftp9"
Dec 02 19:00:26 crc kubenswrapper[4792]: I1202 19:00:26.272782 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"39f28fd8-7806-46cb-b9cc-1be441cf71ef","Type":"ContainerStarted","Data":"60e9312318e7ab7e68841b02a64218ac06d67ae09ea292eb89696e63eff708d8"}
Dec 02 19:00:26 crc kubenswrapper[4792]: I1202 19:00:26.272821 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"39f28fd8-7806-46cb-b9cc-1be441cf71ef","Type":"ContainerStarted","Data":"70f15b8f83b858c0d39332ea11eacb405c3c206c8ceb0fb0672d8cef40e4de27"}
Dec 02 19:00:26 crc kubenswrapper[4792]: I1202 19:00:26.272913 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="39f28fd8-7806-46cb-b9cc-1be441cf71ef" containerName="nova-metadata-log" containerID="cri-o://70f15b8f83b858c0d39332ea11eacb405c3c206c8ceb0fb0672d8cef40e4de27" gracePeriod=30
Dec 02 19:00:26 crc kubenswrapper[4792]: I1202 19:00:26.273669 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="39f28fd8-7806-46cb-b9cc-1be441cf71ef" containerName="nova-metadata-metadata" containerID="cri-o://60e9312318e7ab7e68841b02a64218ac06d67ae09ea292eb89696e63eff708d8" gracePeriod=30
Dec 02 19:00:26 crc kubenswrapper[4792]: I1202 19:00:26.276178 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=3.64975024 podStartE2EDuration="7.276158327s" podCreationTimestamp="2025-12-02 19:00:19 +0000 UTC" firstStartedPulling="2025-12-02 19:00:21.30561221 +0000 UTC m=+1452.078504538" lastFinishedPulling="2025-12-02 19:00:24.932020297 +0000 UTC m=+1455.704912625" observedRunningTime="2025-12-02 19:00:26.272291607 +0000 UTC m=+1457.045183935" watchObservedRunningTime="2025-12-02 19:00:26.276158327 +0000 UTC m=+1457.049050655"
Dec 02 19:00:26 crc kubenswrapper[4792]: I1202 19:00:26.296695 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.902637962 podStartE2EDuration="6.296676838s" podCreationTimestamp="2025-12-02 19:00:20 +0000 UTC" firstStartedPulling="2025-12-02 19:00:21.604954867 +0000 UTC m=+1452.377847195" lastFinishedPulling="2025-12-02 19:00:24.998993743 +0000 UTC m=+1455.771886071" observedRunningTime="2025-12-02 19:00:26.295474417 +0000 UTC m=+1457.068366745" watchObservedRunningTime="2025-12-02 19:00:26.296676838 +0000 UTC m=+1457.069569166"
Dec 02 19:00:26 crc kubenswrapper[4792]: I1202 19:00:26.321966 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.924228792 podStartE2EDuration="6.321944063s" podCreationTimestamp="2025-12-02 19:00:20 +0000 UTC" firstStartedPulling="2025-12-02 19:00:21.602653527 +0000 UTC m=+1452.375545845" lastFinishedPulling="2025-12-02 19:00:25.000368778 +0000 UTC m=+1455.773261116" observedRunningTime="2025-12-02 19:00:26.310570828 +0000 UTC m=+1457.083463156" watchObservedRunningTime="2025-12-02 19:00:26.321944063 +0000 UTC m=+1457.094836391"
Dec 02 19:00:26 crc kubenswrapper[4792]: I1202 19:00:26.331163 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-glqwc" podStartSLOduration=5.331145122 podStartE2EDuration="5.331145122s" podCreationTimestamp="2025-12-02 19:00:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 19:00:26.322303562 +0000 UTC m=+1457.095195890" watchObservedRunningTime="2025-12-02 19:00:26.331145122 +0000 UTC m=+1457.104037440"
Dec 02 19:00:26 crc kubenswrapper[4792]: I1202 19:00:26.359100 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-78cd565959-lftp9" podStartSLOduration=6.359080925 podStartE2EDuration="6.359080925s" podCreationTimestamp="2025-12-02 19:00:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 19:00:26.349501447 +0000 UTC m=+1457.122393775" watchObservedRunningTime="2025-12-02 19:00:26.359080925 +0000 UTC m=+1457.131973253"
Dec 02 19:00:26 crc kubenswrapper[4792]: I1202 19:00:26.371958 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.613311307 podStartE2EDuration="7.371941659s" podCreationTimestamp="2025-12-02 19:00:19 +0000 UTC" firstStartedPulling="2025-12-02 19:00:21.175025908 +0000 UTC m=+1451.947918236" lastFinishedPulling="2025-12-02 19:00:24.93365626 +0000 UTC m=+1455.706548588" observedRunningTime="2025-12-02 19:00:26.367010781 +0000 UTC m=+1457.139903109" watchObservedRunningTime="2025-12-02 19:00:26.371941659 +0000 UTC m=+1457.144833987"
Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.000292 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.169825 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39f28fd8-7806-46cb-b9cc-1be441cf71ef-combined-ca-bundle\") pod \"39f28fd8-7806-46cb-b9cc-1be441cf71ef\" (UID: \"39f28fd8-7806-46cb-b9cc-1be441cf71ef\") "
Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.169950 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/39f28fd8-7806-46cb-b9cc-1be441cf71ef-config-data\") pod \"39f28fd8-7806-46cb-b9cc-1be441cf71ef\" (UID: \"39f28fd8-7806-46cb-b9cc-1be441cf71ef\") "
Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.170026 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zsqkn\" (UniqueName: \"kubernetes.io/projected/39f28fd8-7806-46cb-b9cc-1be441cf71ef-kube-api-access-zsqkn\") pod \"39f28fd8-7806-46cb-b9cc-1be441cf71ef\" (UID: \"39f28fd8-7806-46cb-b9cc-1be441cf71ef\") "
Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.170056 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/39f28fd8-7806-46cb-b9cc-1be441cf71ef-logs\") pod \"39f28fd8-7806-46cb-b9cc-1be441cf71ef\" (UID: \"39f28fd8-7806-46cb-b9cc-1be441cf71ef\") "
Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.170492 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39f28fd8-7806-46cb-b9cc-1be441cf71ef-logs" (OuterVolumeSpecName: "logs") pod "39f28fd8-7806-46cb-b9cc-1be441cf71ef" (UID: "39f28fd8-7806-46cb-b9cc-1be441cf71ef"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.176147 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39f28fd8-7806-46cb-b9cc-1be441cf71ef-kube-api-access-zsqkn" (OuterVolumeSpecName: "kube-api-access-zsqkn") pod "39f28fd8-7806-46cb-b9cc-1be441cf71ef" (UID: "39f28fd8-7806-46cb-b9cc-1be441cf71ef"). InnerVolumeSpecName "kube-api-access-zsqkn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.200746 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39f28fd8-7806-46cb-b9cc-1be441cf71ef-config-data" (OuterVolumeSpecName: "config-data") pod "39f28fd8-7806-46cb-b9cc-1be441cf71ef" (UID: "39f28fd8-7806-46cb-b9cc-1be441cf71ef"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.212085 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39f28fd8-7806-46cb-b9cc-1be441cf71ef-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "39f28fd8-7806-46cb-b9cc-1be441cf71ef" (UID: "39f28fd8-7806-46cb-b9cc-1be441cf71ef"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.272761 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39f28fd8-7806-46cb-b9cc-1be441cf71ef-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.272850 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/39f28fd8-7806-46cb-b9cc-1be441cf71ef-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.272871 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zsqkn\" (UniqueName: \"kubernetes.io/projected/39f28fd8-7806-46cb-b9cc-1be441cf71ef-kube-api-access-zsqkn\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.272891 4792 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/39f28fd8-7806-46cb-b9cc-1be441cf71ef-logs\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.287588 4792 generic.go:334] "Generic (PLEG): container finished" podID="39f28fd8-7806-46cb-b9cc-1be441cf71ef" containerID="60e9312318e7ab7e68841b02a64218ac06d67ae09ea292eb89696e63eff708d8" exitCode=0 Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.287840 4792 generic.go:334] "Generic (PLEG): container finished" podID="39f28fd8-7806-46cb-b9cc-1be441cf71ef" containerID="70f15b8f83b858c0d39332ea11eacb405c3c206c8ceb0fb0672d8cef40e4de27" exitCode=143 Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.289972 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.293672 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"39f28fd8-7806-46cb-b9cc-1be441cf71ef","Type":"ContainerDied","Data":"60e9312318e7ab7e68841b02a64218ac06d67ae09ea292eb89696e63eff708d8"} Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.293736 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"39f28fd8-7806-46cb-b9cc-1be441cf71ef","Type":"ContainerDied","Data":"70f15b8f83b858c0d39332ea11eacb405c3c206c8ceb0fb0672d8cef40e4de27"} Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.293746 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"39f28fd8-7806-46cb-b9cc-1be441cf71ef","Type":"ContainerDied","Data":"7a8e52caa71c9bc3af3bdc19dd3992f44ad98e40121a239bdcf113a78a7ac2ed"} Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.293763 4792 scope.go:117] "RemoveContainer" containerID="60e9312318e7ab7e68841b02a64218ac06d67ae09ea292eb89696e63eff708d8" Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.323364 4792 scope.go:117] "RemoveContainer" containerID="70f15b8f83b858c0d39332ea11eacb405c3c206c8ceb0fb0672d8cef40e4de27" Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.333319 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.347023 4792 scope.go:117] "RemoveContainer" containerID="60e9312318e7ab7e68841b02a64218ac06d67ae09ea292eb89696e63eff708d8" Dec 02 19:00:27 crc kubenswrapper[4792]: E1202 19:00:27.347427 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"60e9312318e7ab7e68841b02a64218ac06d67ae09ea292eb89696e63eff708d8\": container with ID starting with 60e9312318e7ab7e68841b02a64218ac06d67ae09ea292eb89696e63eff708d8 not found: ID does not exist" containerID="60e9312318e7ab7e68841b02a64218ac06d67ae09ea292eb89696e63eff708d8" Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.347458 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"60e9312318e7ab7e68841b02a64218ac06d67ae09ea292eb89696e63eff708d8"} err="failed to get container status \"60e9312318e7ab7e68841b02a64218ac06d67ae09ea292eb89696e63eff708d8\": rpc error: code = NotFound desc = could not find container \"60e9312318e7ab7e68841b02a64218ac06d67ae09ea292eb89696e63eff708d8\": container with ID starting with 60e9312318e7ab7e68841b02a64218ac06d67ae09ea292eb89696e63eff708d8 not found: ID does not exist" Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.347482 4792 scope.go:117] "RemoveContainer" containerID="70f15b8f83b858c0d39332ea11eacb405c3c206c8ceb0fb0672d8cef40e4de27" Dec 02 19:00:27 crc kubenswrapper[4792]: E1202 19:00:27.347725 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"70f15b8f83b858c0d39332ea11eacb405c3c206c8ceb0fb0672d8cef40e4de27\": container with ID starting with 70f15b8f83b858c0d39332ea11eacb405c3c206c8ceb0fb0672d8cef40e4de27 not found: ID does not exist" containerID="70f15b8f83b858c0d39332ea11eacb405c3c206c8ceb0fb0672d8cef40e4de27" Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.347768 4792 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"70f15b8f83b858c0d39332ea11eacb405c3c206c8ceb0fb0672d8cef40e4de27"} err="failed to get container status \"70f15b8f83b858c0d39332ea11eacb405c3c206c8ceb0fb0672d8cef40e4de27\": rpc error: code = NotFound desc = could not find container \"70f15b8f83b858c0d39332ea11eacb405c3c206c8ceb0fb0672d8cef40e4de27\": container with ID starting with 70f15b8f83b858c0d39332ea11eacb405c3c206c8ceb0fb0672d8cef40e4de27 not found: ID does not exist" Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.347798 4792 scope.go:117] "RemoveContainer" containerID="60e9312318e7ab7e68841b02a64218ac06d67ae09ea292eb89696e63eff708d8" Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.348212 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"60e9312318e7ab7e68841b02a64218ac06d67ae09ea292eb89696e63eff708d8"} err="failed to get container status \"60e9312318e7ab7e68841b02a64218ac06d67ae09ea292eb89696e63eff708d8\": rpc error: code = NotFound desc = could not find container \"60e9312318e7ab7e68841b02a64218ac06d67ae09ea292eb89696e63eff708d8\": container with ID starting with 60e9312318e7ab7e68841b02a64218ac06d67ae09ea292eb89696e63eff708d8 not found: ID does not exist" Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.348240 4792 scope.go:117] "RemoveContainer" containerID="70f15b8f83b858c0d39332ea11eacb405c3c206c8ceb0fb0672d8cef40e4de27" Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.348464 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70f15b8f83b858c0d39332ea11eacb405c3c206c8ceb0fb0672d8cef40e4de27"} err="failed to get container status \"70f15b8f83b858c0d39332ea11eacb405c3c206c8ceb0fb0672d8cef40e4de27\": rpc error: code = NotFound desc = could not find container \"70f15b8f83b858c0d39332ea11eacb405c3c206c8ceb0fb0672d8cef40e4de27\": container with ID starting with 70f15b8f83b858c0d39332ea11eacb405c3c206c8ceb0fb0672d8cef40e4de27 not found: ID does not exist" Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.352080 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.367410 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 02 19:00:27 crc kubenswrapper[4792]: E1202 19:00:27.367864 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39f28fd8-7806-46cb-b9cc-1be441cf71ef" containerName="nova-metadata-log" Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.367888 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="39f28fd8-7806-46cb-b9cc-1be441cf71ef" containerName="nova-metadata-log" Dec 02 19:00:27 crc kubenswrapper[4792]: E1202 19:00:27.367905 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39f28fd8-7806-46cb-b9cc-1be441cf71ef" containerName="nova-metadata-metadata" Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.367912 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="39f28fd8-7806-46cb-b9cc-1be441cf71ef" containerName="nova-metadata-metadata" Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.368141 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="39f28fd8-7806-46cb-b9cc-1be441cf71ef" containerName="nova-metadata-metadata" Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.368162 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="39f28fd8-7806-46cb-b9cc-1be441cf71ef" containerName="nova-metadata-log" Dec 02 19:00:27 crc 
Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.369260 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.377625 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.379009 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc"
Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.379305 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.480297 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9372aa0-1bc6-460f-b0d3-959a963670c5-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c9372aa0-1bc6-460f-b0d3-959a963670c5\") " pod="openstack/nova-metadata-0"
Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.480345 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbq4x\" (UniqueName: \"kubernetes.io/projected/c9372aa0-1bc6-460f-b0d3-959a963670c5-kube-api-access-rbq4x\") pod \"nova-metadata-0\" (UID: \"c9372aa0-1bc6-460f-b0d3-959a963670c5\") " pod="openstack/nova-metadata-0"
Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.480621 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c9372aa0-1bc6-460f-b0d3-959a963670c5-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c9372aa0-1bc6-460f-b0d3-959a963670c5\") " pod="openstack/nova-metadata-0"
Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.481018 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9372aa0-1bc6-460f-b0d3-959a963670c5-config-data\") pod \"nova-metadata-0\" (UID: \"c9372aa0-1bc6-460f-b0d3-959a963670c5\") " pod="openstack/nova-metadata-0"
Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.481069 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9372aa0-1bc6-460f-b0d3-959a963670c5-logs\") pod \"nova-metadata-0\" (UID: \"c9372aa0-1bc6-460f-b0d3-959a963670c5\") " pod="openstack/nova-metadata-0"
Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.550567 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39f28fd8-7806-46cb-b9cc-1be441cf71ef" path="/var/lib/kubelet/pods/39f28fd8-7806-46cb-b9cc-1be441cf71ef/volumes"
Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.582769 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9372aa0-1bc6-460f-b0d3-959a963670c5-config-data\") pod \"nova-metadata-0\" (UID: \"c9372aa0-1bc6-460f-b0d3-959a963670c5\") " pod="openstack/nova-metadata-0"
Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.582818 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9372aa0-1bc6-460f-b0d3-959a963670c5-logs\") pod \"nova-metadata-0\" (UID: \"c9372aa0-1bc6-460f-b0d3-959a963670c5\") " pod="openstack/nova-metadata-0"
Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.582874 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9372aa0-1bc6-460f-b0d3-959a963670c5-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c9372aa0-1bc6-460f-b0d3-959a963670c5\") " pod="openstack/nova-metadata-0"
Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.582904 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbq4x\" (UniqueName: \"kubernetes.io/projected/c9372aa0-1bc6-460f-b0d3-959a963670c5-kube-api-access-rbq4x\") pod \"nova-metadata-0\" (UID: \"c9372aa0-1bc6-460f-b0d3-959a963670c5\") " pod="openstack/nova-metadata-0"
Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.583138 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c9372aa0-1bc6-460f-b0d3-959a963670c5-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c9372aa0-1bc6-460f-b0d3-959a963670c5\") " pod="openstack/nova-metadata-0"
Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.587244 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9372aa0-1bc6-460f-b0d3-959a963670c5-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c9372aa0-1bc6-460f-b0d3-959a963670c5\") " pod="openstack/nova-metadata-0"
Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.587329 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9372aa0-1bc6-460f-b0d3-959a963670c5-config-data\") pod \"nova-metadata-0\" (UID: \"c9372aa0-1bc6-460f-b0d3-959a963670c5\") " pod="openstack/nova-metadata-0"
Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.587468 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9372aa0-1bc6-460f-b0d3-959a963670c5-logs\") pod \"nova-metadata-0\" (UID: \"c9372aa0-1bc6-460f-b0d3-959a963670c5\") " pod="openstack/nova-metadata-0"
Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.593992 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c9372aa0-1bc6-460f-b0d3-959a963670c5-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c9372aa0-1bc6-460f-b0d3-959a963670c5\") " pod="openstack/nova-metadata-0"
Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.605939 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rbq4x\" (UniqueName: \"kubernetes.io/projected/c9372aa0-1bc6-460f-b0d3-959a963670c5-kube-api-access-rbq4x\") pod \"nova-metadata-0\" (UID: \"c9372aa0-1bc6-460f-b0d3-959a963670c5\") " pod="openstack/nova-metadata-0"
Dec 02 19:00:27 crc kubenswrapper[4792]: I1202 19:00:27.697504 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 02 19:00:28 crc kubenswrapper[4792]: I1202 19:00:28.223118 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 02 19:00:28 crc kubenswrapper[4792]: I1202 19:00:28.300620 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c9372aa0-1bc6-460f-b0d3-959a963670c5","Type":"ContainerStarted","Data":"a4b1504f8601b6f257cafd0742d5f4f84c1b5dabfc8047afbbf16c302cf2d93e"}
Dec 02 19:00:29 crc kubenswrapper[4792]: I1202 19:00:29.313772 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c9372aa0-1bc6-460f-b0d3-959a963670c5","Type":"ContainerStarted","Data":"dd99c7bf4e3327f69de4fec44601ef8a8995df0bee14d9b292db580f2f59402e"}
Dec 02 19:00:29 crc kubenswrapper[4792]: I1202 19:00:29.314100 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c9372aa0-1bc6-460f-b0d3-959a963670c5","Type":"ContainerStarted","Data":"01f016f3d370112bf9ddd644e3e29953f1c5e66461e533325b8c4656feff5aa6"}
Dec 02 19:00:29 crc kubenswrapper[4792]: I1202 19:00:29.316863 4792 generic.go:334] "Generic (PLEG): container finished" podID="82858e4f-bc42-4839-8d38-9f7f1772a089" containerID="0dc8f4ae80975827647fd7ba656d100c9c201b93d36d62e17511fbed9a2ebae4" exitCode=0
Dec 02 19:00:29 crc kubenswrapper[4792]: I1202 19:00:29.316906 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-428v5" event={"ID":"82858e4f-bc42-4839-8d38-9f7f1772a089","Type":"ContainerDied","Data":"0dc8f4ae80975827647fd7ba656d100c9c201b93d36d62e17511fbed9a2ebae4"}
Dec 02 19:00:29 crc kubenswrapper[4792]: I1202 19:00:29.335350 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.335329225 podStartE2EDuration="2.335329225s" podCreationTimestamp="2025-12-02 19:00:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 19:00:29.331190828 +0000 UTC m=+1460.104083156" watchObservedRunningTime="2025-12-02 19:00:29.335329225 +0000 UTC m=+1460.108221573"
Dec 02 19:00:30 crc kubenswrapper[4792]: I1202 19:00:30.313613 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0"
Dec 02 19:00:30 crc kubenswrapper[4792]: I1202 19:00:30.704651 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Dec 02 19:00:30 crc kubenswrapper[4792]: I1202 19:00:30.704712 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Dec 02 19:00:30 crc kubenswrapper[4792]: I1202 19:00:30.725472 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Dec 02 19:00:30 crc kubenswrapper[4792]: I1202 19:00:30.725544 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Dec 02 19:00:30 crc kubenswrapper[4792]: I1202 19:00:30.733868 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-78cd565959-lftp9"
Dec 02 19:00:30 crc kubenswrapper[4792]: I1202 19:00:30.739675 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Dec 02 19:00:30 crc kubenswrapper[4792]: I1202 19:00:30.810081 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67bdc55879-c9bv9"]
pods=["openstack/dnsmasq-dns-67bdc55879-c9bv9"] Dec 02 19:00:30 crc kubenswrapper[4792]: I1202 19:00:30.810299 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-67bdc55879-c9bv9" podUID="f05e35a9-ee56-406e-a58b-ec43e8c76dcf" containerName="dnsmasq-dns" containerID="cri-o://1aa41d8bc5c9b7f52d3813de7c4fa792b2a5d3f3ebe25c20e24c2575d692a575" gracePeriod=10 Dec 02 19:00:30 crc kubenswrapper[4792]: I1202 19:00:30.988470 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-428v5" Dec 02 19:00:31 crc kubenswrapper[4792]: I1202 19:00:31.165529 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/82858e4f-bc42-4839-8d38-9f7f1772a089-config-data\") pod \"82858e4f-bc42-4839-8d38-9f7f1772a089\" (UID: \"82858e4f-bc42-4839-8d38-9f7f1772a089\") " Dec 02 19:00:31 crc kubenswrapper[4792]: I1202 19:00:31.165691 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/82858e4f-bc42-4839-8d38-9f7f1772a089-scripts\") pod \"82858e4f-bc42-4839-8d38-9f7f1772a089\" (UID: \"82858e4f-bc42-4839-8d38-9f7f1772a089\") " Dec 02 19:00:31 crc kubenswrapper[4792]: I1202 19:00:31.165872 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/82858e4f-bc42-4839-8d38-9f7f1772a089-combined-ca-bundle\") pod \"82858e4f-bc42-4839-8d38-9f7f1772a089\" (UID: \"82858e4f-bc42-4839-8d38-9f7f1772a089\") " Dec 02 19:00:31 crc kubenswrapper[4792]: I1202 19:00:31.165921 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s26rc\" (UniqueName: \"kubernetes.io/projected/82858e4f-bc42-4839-8d38-9f7f1772a089-kube-api-access-s26rc\") pod \"82858e4f-bc42-4839-8d38-9f7f1772a089\" (UID: \"82858e4f-bc42-4839-8d38-9f7f1772a089\") " Dec 02 19:00:31 crc kubenswrapper[4792]: I1202 19:00:31.186372 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82858e4f-bc42-4839-8d38-9f7f1772a089-kube-api-access-s26rc" (OuterVolumeSpecName: "kube-api-access-s26rc") pod "82858e4f-bc42-4839-8d38-9f7f1772a089" (UID: "82858e4f-bc42-4839-8d38-9f7f1772a089"). InnerVolumeSpecName "kube-api-access-s26rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:00:31 crc kubenswrapper[4792]: I1202 19:00:31.193932 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/82858e4f-bc42-4839-8d38-9f7f1772a089-scripts" (OuterVolumeSpecName: "scripts") pod "82858e4f-bc42-4839-8d38-9f7f1772a089" (UID: "82858e4f-bc42-4839-8d38-9f7f1772a089"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:00:31 crc kubenswrapper[4792]: I1202 19:00:31.208130 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/82858e4f-bc42-4839-8d38-9f7f1772a089-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "82858e4f-bc42-4839-8d38-9f7f1772a089" (UID: "82858e4f-bc42-4839-8d38-9f7f1772a089"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:00:31 crc kubenswrapper[4792]: I1202 19:00:31.226269 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/82858e4f-bc42-4839-8d38-9f7f1772a089-config-data" (OuterVolumeSpecName: "config-data") pod "82858e4f-bc42-4839-8d38-9f7f1772a089" (UID: "82858e4f-bc42-4839-8d38-9f7f1772a089"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:00:31 crc kubenswrapper[4792]: I1202 19:00:31.269172 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/82858e4f-bc42-4839-8d38-9f7f1772a089-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:31 crc kubenswrapper[4792]: I1202 19:00:31.269215 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s26rc\" (UniqueName: \"kubernetes.io/projected/82858e4f-bc42-4839-8d38-9f7f1772a089-kube-api-access-s26rc\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:31 crc kubenswrapper[4792]: I1202 19:00:31.269233 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/82858e4f-bc42-4839-8d38-9f7f1772a089-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:31 crc kubenswrapper[4792]: I1202 19:00:31.269244 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/82858e4f-bc42-4839-8d38-9f7f1772a089-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:31 crc kubenswrapper[4792]: I1202 19:00:31.342587 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-428v5" event={"ID":"82858e4f-bc42-4839-8d38-9f7f1772a089","Type":"ContainerDied","Data":"2c4559cddfde3fd29a01f0916f4bbd35b76fe4de2e96081657a65393a02a8113"} Dec 02 19:00:31 crc kubenswrapper[4792]: I1202 19:00:31.342628 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2c4559cddfde3fd29a01f0916f4bbd35b76fe4de2e96081657a65393a02a8113" Dec 02 19:00:31 crc kubenswrapper[4792]: I1202 19:00:31.342703 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-428v5" Dec 02 19:00:31 crc kubenswrapper[4792]: I1202 19:00:31.347937 4792 generic.go:334] "Generic (PLEG): container finished" podID="f05e35a9-ee56-406e-a58b-ec43e8c76dcf" containerID="1aa41d8bc5c9b7f52d3813de7c4fa792b2a5d3f3ebe25c20e24c2575d692a575" exitCode=0 Dec 02 19:00:31 crc kubenswrapper[4792]: I1202 19:00:31.348308 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67bdc55879-c9bv9" event={"ID":"f05e35a9-ee56-406e-a58b-ec43e8c76dcf","Type":"ContainerDied","Data":"1aa41d8bc5c9b7f52d3813de7c4fa792b2a5d3f3ebe25c20e24c2575d692a575"} Dec 02 19:00:31 crc kubenswrapper[4792]: I1202 19:00:31.371289 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67bdc55879-c9bv9" Dec 02 19:00:31 crc kubenswrapper[4792]: I1202 19:00:31.387933 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 02 19:00:31 crc kubenswrapper[4792]: I1202 19:00:31.473992 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5ckb6\" (UniqueName: \"kubernetes.io/projected/f05e35a9-ee56-406e-a58b-ec43e8c76dcf-kube-api-access-5ckb6\") pod \"f05e35a9-ee56-406e-a58b-ec43e8c76dcf\" (UID: \"f05e35a9-ee56-406e-a58b-ec43e8c76dcf\") " Dec 02 19:00:31 crc kubenswrapper[4792]: I1202 19:00:31.474045 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f05e35a9-ee56-406e-a58b-ec43e8c76dcf-dns-svc\") pod \"f05e35a9-ee56-406e-a58b-ec43e8c76dcf\" (UID: \"f05e35a9-ee56-406e-a58b-ec43e8c76dcf\") " Dec 02 19:00:31 crc kubenswrapper[4792]: I1202 19:00:31.474101 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f05e35a9-ee56-406e-a58b-ec43e8c76dcf-dns-swift-storage-0\") pod \"f05e35a9-ee56-406e-a58b-ec43e8c76dcf\" (UID: \"f05e35a9-ee56-406e-a58b-ec43e8c76dcf\") " Dec 02 19:00:31 crc kubenswrapper[4792]: I1202 19:00:31.474192 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f05e35a9-ee56-406e-a58b-ec43e8c76dcf-config\") pod \"f05e35a9-ee56-406e-a58b-ec43e8c76dcf\" (UID: \"f05e35a9-ee56-406e-a58b-ec43e8c76dcf\") " Dec 02 19:00:31 crc kubenswrapper[4792]: I1202 19:00:31.474300 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f05e35a9-ee56-406e-a58b-ec43e8c76dcf-ovsdbserver-nb\") pod \"f05e35a9-ee56-406e-a58b-ec43e8c76dcf\" (UID: \"f05e35a9-ee56-406e-a58b-ec43e8c76dcf\") " Dec 02 19:00:31 crc kubenswrapper[4792]: I1202 19:00:31.474349 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f05e35a9-ee56-406e-a58b-ec43e8c76dcf-ovsdbserver-sb\") pod \"f05e35a9-ee56-406e-a58b-ec43e8c76dcf\" (UID: \"f05e35a9-ee56-406e-a58b-ec43e8c76dcf\") " Dec 02 19:00:31 crc kubenswrapper[4792]: I1202 19:00:31.478703 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f05e35a9-ee56-406e-a58b-ec43e8c76dcf-kube-api-access-5ckb6" (OuterVolumeSpecName: "kube-api-access-5ckb6") pod "f05e35a9-ee56-406e-a58b-ec43e8c76dcf" (UID: "f05e35a9-ee56-406e-a58b-ec43e8c76dcf"). InnerVolumeSpecName "kube-api-access-5ckb6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:00:31 crc kubenswrapper[4792]: I1202 19:00:31.547019 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f05e35a9-ee56-406e-a58b-ec43e8c76dcf-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "f05e35a9-ee56-406e-a58b-ec43e8c76dcf" (UID: "f05e35a9-ee56-406e-a58b-ec43e8c76dcf"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 19:00:31 crc kubenswrapper[4792]: I1202 19:00:31.558916 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f05e35a9-ee56-406e-a58b-ec43e8c76dcf-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f05e35a9-ee56-406e-a58b-ec43e8c76dcf" (UID: "f05e35a9-ee56-406e-a58b-ec43e8c76dcf"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 19:00:31 crc kubenswrapper[4792]: I1202 19:00:31.567588 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f05e35a9-ee56-406e-a58b-ec43e8c76dcf-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f05e35a9-ee56-406e-a58b-ec43e8c76dcf" (UID: "f05e35a9-ee56-406e-a58b-ec43e8c76dcf"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 19:00:31 crc kubenswrapper[4792]: I1202 19:00:31.572709 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 02 19:00:31 crc kubenswrapper[4792]: I1202 19:00:31.572915 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="6194d48a-051d-4700-ab26-11280eb387f3" containerName="nova-api-log" containerID="cri-o://51c43252594109d84fc6199823d9e46620e01716bc34745d24ed9a01874945fc" gracePeriod=30 Dec 02 19:00:31 crc kubenswrapper[4792]: I1202 19:00:31.573370 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="6194d48a-051d-4700-ab26-11280eb387f3" containerName="nova-api-api" containerID="cri-o://e3a308d998f1339d73d29d80c40d6fab0707ed42cbffb57a0bc75748a776e7ab" gracePeriod=30 Dec 02 19:00:31 crc kubenswrapper[4792]: I1202 19:00:31.577140 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5ckb6\" (UniqueName: \"kubernetes.io/projected/f05e35a9-ee56-406e-a58b-ec43e8c76dcf-kube-api-access-5ckb6\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:31 crc kubenswrapper[4792]: I1202 19:00:31.577163 4792 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f05e35a9-ee56-406e-a58b-ec43e8c76dcf-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:31 crc kubenswrapper[4792]: I1202 19:00:31.577174 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f05e35a9-ee56-406e-a58b-ec43e8c76dcf-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:31 crc kubenswrapper[4792]: I1202 19:00:31.577182 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f05e35a9-ee56-406e-a58b-ec43e8c76dcf-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:31 crc kubenswrapper[4792]: I1202 19:00:31.577339 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="6194d48a-051d-4700-ab26-11280eb387f3" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.213:8774/\": EOF" Dec 02 19:00:31 crc kubenswrapper[4792]: I1202 19:00:31.577759 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="6194d48a-051d-4700-ab26-11280eb387f3" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.213:8774/\": EOF" Dec 02 19:00:31 crc kubenswrapper[4792]: I1202 19:00:31.579754 4792 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f05e35a9-ee56-406e-a58b-ec43e8c76dcf-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f05e35a9-ee56-406e-a58b-ec43e8c76dcf" (UID: "f05e35a9-ee56-406e-a58b-ec43e8c76dcf"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 19:00:31 crc kubenswrapper[4792]: I1202 19:00:31.579802 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 19:00:31 crc kubenswrapper[4792]: I1202 19:00:31.580149 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="c9372aa0-1bc6-460f-b0d3-959a963670c5" containerName="nova-metadata-log" containerID="cri-o://01f016f3d370112bf9ddd644e3e29953f1c5e66461e533325b8c4656feff5aa6" gracePeriod=30 Dec 02 19:00:31 crc kubenswrapper[4792]: I1202 19:00:31.580697 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="c9372aa0-1bc6-460f-b0d3-959a963670c5" containerName="nova-metadata-metadata" containerID="cri-o://dd99c7bf4e3327f69de4fec44601ef8a8995df0bee14d9b292db580f2f59402e" gracePeriod=30 Dec 02 19:00:31 crc kubenswrapper[4792]: I1202 19:00:31.607217 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f05e35a9-ee56-406e-a58b-ec43e8c76dcf-config" (OuterVolumeSpecName: "config") pod "f05e35a9-ee56-406e-a58b-ec43e8c76dcf" (UID: "f05e35a9-ee56-406e-a58b-ec43e8c76dcf"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 19:00:31 crc kubenswrapper[4792]: I1202 19:00:31.678673 4792 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f05e35a9-ee56-406e-a58b-ec43e8c76dcf-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:31 crc kubenswrapper[4792]: I1202 19:00:31.678896 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f05e35a9-ee56-406e-a58b-ec43e8c76dcf-config\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.064331 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.214772 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.289957 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9372aa0-1bc6-460f-b0d3-959a963670c5-logs\") pod \"c9372aa0-1bc6-460f-b0d3-959a963670c5\" (UID: \"c9372aa0-1bc6-460f-b0d3-959a963670c5\") " Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.290042 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c9372aa0-1bc6-460f-b0d3-959a963670c5-nova-metadata-tls-certs\") pod \"c9372aa0-1bc6-460f-b0d3-959a963670c5\" (UID: \"c9372aa0-1bc6-460f-b0d3-959a963670c5\") " Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.290082 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9372aa0-1bc6-460f-b0d3-959a963670c5-combined-ca-bundle\") pod \"c9372aa0-1bc6-460f-b0d3-959a963670c5\" (UID: \"c9372aa0-1bc6-460f-b0d3-959a963670c5\") " Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.290210 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rbq4x\" (UniqueName: \"kubernetes.io/projected/c9372aa0-1bc6-460f-b0d3-959a963670c5-kube-api-access-rbq4x\") pod \"c9372aa0-1bc6-460f-b0d3-959a963670c5\" (UID: \"c9372aa0-1bc6-460f-b0d3-959a963670c5\") " Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.290245 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9372aa0-1bc6-460f-b0d3-959a963670c5-config-data\") pod \"c9372aa0-1bc6-460f-b0d3-959a963670c5\" (UID: \"c9372aa0-1bc6-460f-b0d3-959a963670c5\") " Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.290730 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c9372aa0-1bc6-460f-b0d3-959a963670c5-logs" (OuterVolumeSpecName: "logs") pod "c9372aa0-1bc6-460f-b0d3-959a963670c5" (UID: "c9372aa0-1bc6-460f-b0d3-959a963670c5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.314434 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9372aa0-1bc6-460f-b0d3-959a963670c5-kube-api-access-rbq4x" (OuterVolumeSpecName: "kube-api-access-rbq4x") pod "c9372aa0-1bc6-460f-b0d3-959a963670c5" (UID: "c9372aa0-1bc6-460f-b0d3-959a963670c5"). InnerVolumeSpecName "kube-api-access-rbq4x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.345638 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9372aa0-1bc6-460f-b0d3-959a963670c5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c9372aa0-1bc6-460f-b0d3-959a963670c5" (UID: "c9372aa0-1bc6-460f-b0d3-959a963670c5"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.362463 4792 generic.go:334] "Generic (PLEG): container finished" podID="c9372aa0-1bc6-460f-b0d3-959a963670c5" containerID="dd99c7bf4e3327f69de4fec44601ef8a8995df0bee14d9b292db580f2f59402e" exitCode=0 Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.362493 4792 generic.go:334] "Generic (PLEG): container finished" podID="c9372aa0-1bc6-460f-b0d3-959a963670c5" containerID="01f016f3d370112bf9ddd644e3e29953f1c5e66461e533325b8c4656feff5aa6" exitCode=143 Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.362551 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c9372aa0-1bc6-460f-b0d3-959a963670c5","Type":"ContainerDied","Data":"dd99c7bf4e3327f69de4fec44601ef8a8995df0bee14d9b292db580f2f59402e"} Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.362578 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c9372aa0-1bc6-460f-b0d3-959a963670c5","Type":"ContainerDied","Data":"01f016f3d370112bf9ddd644e3e29953f1c5e66461e533325b8c4656feff5aa6"} Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.362588 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c9372aa0-1bc6-460f-b0d3-959a963670c5","Type":"ContainerDied","Data":"a4b1504f8601b6f257cafd0742d5f4f84c1b5dabfc8047afbbf16c302cf2d93e"} Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.362603 4792 scope.go:117] "RemoveContainer" containerID="dd99c7bf4e3327f69de4fec44601ef8a8995df0bee14d9b292db580f2f59402e" Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.362724 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.362725 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9372aa0-1bc6-460f-b0d3-959a963670c5-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "c9372aa0-1bc6-460f-b0d3-959a963670c5" (UID: "c9372aa0-1bc6-460f-b0d3-959a963670c5"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.372880 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67bdc55879-c9bv9" event={"ID":"f05e35a9-ee56-406e-a58b-ec43e8c76dcf","Type":"ContainerDied","Data":"be005de97aed1a4b16838470ee4b48748e3b693d6d50f92d1237ba111e4b85d8"} Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.372997 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67bdc55879-c9bv9" Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.378590 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9372aa0-1bc6-460f-b0d3-959a963670c5-config-data" (OuterVolumeSpecName: "config-data") pod "c9372aa0-1bc6-460f-b0d3-959a963670c5" (UID: "c9372aa0-1bc6-460f-b0d3-959a963670c5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.380774 4792 generic.go:334] "Generic (PLEG): container finished" podID="6194d48a-051d-4700-ab26-11280eb387f3" containerID="51c43252594109d84fc6199823d9e46620e01716bc34745d24ed9a01874945fc" exitCode=143 Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.381659 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6194d48a-051d-4700-ab26-11280eb387f3","Type":"ContainerDied","Data":"51c43252594109d84fc6199823d9e46620e01716bc34745d24ed9a01874945fc"} Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.393182 4792 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9372aa0-1bc6-460f-b0d3-959a963670c5-logs\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.393210 4792 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c9372aa0-1bc6-460f-b0d3-959a963670c5-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.393222 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9372aa0-1bc6-460f-b0d3-959a963670c5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.393230 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rbq4x\" (UniqueName: \"kubernetes.io/projected/c9372aa0-1bc6-460f-b0d3-959a963670c5-kube-api-access-rbq4x\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.393239 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9372aa0-1bc6-460f-b0d3-959a963670c5-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.431633 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67bdc55879-c9bv9"] Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.431659 4792 scope.go:117] "RemoveContainer" containerID="01f016f3d370112bf9ddd644e3e29953f1c5e66461e533325b8c4656feff5aa6" Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.446425 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-67bdc55879-c9bv9"] Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.463391 4792 scope.go:117] "RemoveContainer" containerID="dd99c7bf4e3327f69de4fec44601ef8a8995df0bee14d9b292db580f2f59402e" Dec 02 19:00:32 crc kubenswrapper[4792]: E1202 19:00:32.463933 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dd99c7bf4e3327f69de4fec44601ef8a8995df0bee14d9b292db580f2f59402e\": container with ID starting with dd99c7bf4e3327f69de4fec44601ef8a8995df0bee14d9b292db580f2f59402e not found: ID does not exist" containerID="dd99c7bf4e3327f69de4fec44601ef8a8995df0bee14d9b292db580f2f59402e" Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.463975 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd99c7bf4e3327f69de4fec44601ef8a8995df0bee14d9b292db580f2f59402e"} err="failed to get container status \"dd99c7bf4e3327f69de4fec44601ef8a8995df0bee14d9b292db580f2f59402e\": rpc error: code = NotFound desc = could not find container 
\"dd99c7bf4e3327f69de4fec44601ef8a8995df0bee14d9b292db580f2f59402e\": container with ID starting with dd99c7bf4e3327f69de4fec44601ef8a8995df0bee14d9b292db580f2f59402e not found: ID does not exist" Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.464003 4792 scope.go:117] "RemoveContainer" containerID="01f016f3d370112bf9ddd644e3e29953f1c5e66461e533325b8c4656feff5aa6" Dec 02 19:00:32 crc kubenswrapper[4792]: E1202 19:00:32.464497 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"01f016f3d370112bf9ddd644e3e29953f1c5e66461e533325b8c4656feff5aa6\": container with ID starting with 01f016f3d370112bf9ddd644e3e29953f1c5e66461e533325b8c4656feff5aa6 not found: ID does not exist" containerID="01f016f3d370112bf9ddd644e3e29953f1c5e66461e533325b8c4656feff5aa6" Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.464541 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"01f016f3d370112bf9ddd644e3e29953f1c5e66461e533325b8c4656feff5aa6"} err="failed to get container status \"01f016f3d370112bf9ddd644e3e29953f1c5e66461e533325b8c4656feff5aa6\": rpc error: code = NotFound desc = could not find container \"01f016f3d370112bf9ddd644e3e29953f1c5e66461e533325b8c4656feff5aa6\": container with ID starting with 01f016f3d370112bf9ddd644e3e29953f1c5e66461e533325b8c4656feff5aa6 not found: ID does not exist" Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.464558 4792 scope.go:117] "RemoveContainer" containerID="dd99c7bf4e3327f69de4fec44601ef8a8995df0bee14d9b292db580f2f59402e" Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.464754 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd99c7bf4e3327f69de4fec44601ef8a8995df0bee14d9b292db580f2f59402e"} err="failed to get container status \"dd99c7bf4e3327f69de4fec44601ef8a8995df0bee14d9b292db580f2f59402e\": rpc error: code = NotFound desc = could not find container \"dd99c7bf4e3327f69de4fec44601ef8a8995df0bee14d9b292db580f2f59402e\": container with ID starting with dd99c7bf4e3327f69de4fec44601ef8a8995df0bee14d9b292db580f2f59402e not found: ID does not exist" Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.464779 4792 scope.go:117] "RemoveContainer" containerID="01f016f3d370112bf9ddd644e3e29953f1c5e66461e533325b8c4656feff5aa6" Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.464963 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"01f016f3d370112bf9ddd644e3e29953f1c5e66461e533325b8c4656feff5aa6"} err="failed to get container status \"01f016f3d370112bf9ddd644e3e29953f1c5e66461e533325b8c4656feff5aa6\": rpc error: code = NotFound desc = could not find container \"01f016f3d370112bf9ddd644e3e29953f1c5e66461e533325b8c4656feff5aa6\": container with ID starting with 01f016f3d370112bf9ddd644e3e29953f1c5e66461e533325b8c4656feff5aa6 not found: ID does not exist" Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.464987 4792 scope.go:117] "RemoveContainer" containerID="1aa41d8bc5c9b7f52d3813de7c4fa792b2a5d3f3ebe25c20e24c2575d692a575" Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.488284 4792 scope.go:117] "RemoveContainer" containerID="99c7a90d2b43aa129dfd5528603d5619a7fe1345a284c0a1747fb00543be5ada" Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.711743 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.723313 4792 
kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.733237 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 02 19:00:32 crc kubenswrapper[4792]: E1202 19:00:32.733629 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f05e35a9-ee56-406e-a58b-ec43e8c76dcf" containerName="dnsmasq-dns" Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.733645 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="f05e35a9-ee56-406e-a58b-ec43e8c76dcf" containerName="dnsmasq-dns" Dec 02 19:00:32 crc kubenswrapper[4792]: E1202 19:00:32.733676 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9372aa0-1bc6-460f-b0d3-959a963670c5" containerName="nova-metadata-metadata" Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.733682 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9372aa0-1bc6-460f-b0d3-959a963670c5" containerName="nova-metadata-metadata" Dec 02 19:00:32 crc kubenswrapper[4792]: E1202 19:00:32.733703 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f05e35a9-ee56-406e-a58b-ec43e8c76dcf" containerName="init" Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.733710 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="f05e35a9-ee56-406e-a58b-ec43e8c76dcf" containerName="init" Dec 02 19:00:32 crc kubenswrapper[4792]: E1202 19:00:32.733725 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9372aa0-1bc6-460f-b0d3-959a963670c5" containerName="nova-metadata-log" Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.733733 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9372aa0-1bc6-460f-b0d3-959a963670c5" containerName="nova-metadata-log" Dec 02 19:00:32 crc kubenswrapper[4792]: E1202 19:00:32.733743 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82858e4f-bc42-4839-8d38-9f7f1772a089" containerName="nova-manage" Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.733749 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="82858e4f-bc42-4839-8d38-9f7f1772a089" containerName="nova-manage" Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.733933 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="82858e4f-bc42-4839-8d38-9f7f1772a089" containerName="nova-manage" Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.733951 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9372aa0-1bc6-460f-b0d3-959a963670c5" containerName="nova-metadata-metadata" Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.733971 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="f05e35a9-ee56-406e-a58b-ec43e8c76dcf" containerName="dnsmasq-dns" Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.733984 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9372aa0-1bc6-460f-b0d3-959a963670c5" containerName="nova-metadata-log" Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.738402 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.740581 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.742209 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.756551 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.903508 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c128c153-5ac8-4dd5-9952-4ad8960e6525-logs\") pod \"nova-metadata-0\" (UID: \"c128c153-5ac8-4dd5-9952-4ad8960e6525\") " pod="openstack/nova-metadata-0" Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.903839 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c128c153-5ac8-4dd5-9952-4ad8960e6525-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c128c153-5ac8-4dd5-9952-4ad8960e6525\") " pod="openstack/nova-metadata-0" Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.903958 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c128c153-5ac8-4dd5-9952-4ad8960e6525-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c128c153-5ac8-4dd5-9952-4ad8960e6525\") " pod="openstack/nova-metadata-0" Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.904144 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htqct\" (UniqueName: \"kubernetes.io/projected/c128c153-5ac8-4dd5-9952-4ad8960e6525-kube-api-access-htqct\") pod \"nova-metadata-0\" (UID: \"c128c153-5ac8-4dd5-9952-4ad8960e6525\") " pod="openstack/nova-metadata-0" Dec 02 19:00:32 crc kubenswrapper[4792]: I1202 19:00:32.904190 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c128c153-5ac8-4dd5-9952-4ad8960e6525-config-data\") pod \"nova-metadata-0\" (UID: \"c128c153-5ac8-4dd5-9952-4ad8960e6525\") " pod="openstack/nova-metadata-0" Dec 02 19:00:33 crc kubenswrapper[4792]: I1202 19:00:33.006262 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htqct\" (UniqueName: \"kubernetes.io/projected/c128c153-5ac8-4dd5-9952-4ad8960e6525-kube-api-access-htqct\") pod \"nova-metadata-0\" (UID: \"c128c153-5ac8-4dd5-9952-4ad8960e6525\") " pod="openstack/nova-metadata-0" Dec 02 19:00:33 crc kubenswrapper[4792]: I1202 19:00:33.006314 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c128c153-5ac8-4dd5-9952-4ad8960e6525-config-data\") pod \"nova-metadata-0\" (UID: \"c128c153-5ac8-4dd5-9952-4ad8960e6525\") " pod="openstack/nova-metadata-0" Dec 02 19:00:33 crc kubenswrapper[4792]: I1202 19:00:33.006351 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c128c153-5ac8-4dd5-9952-4ad8960e6525-logs\") pod \"nova-metadata-0\" (UID: \"c128c153-5ac8-4dd5-9952-4ad8960e6525\") " pod="openstack/nova-metadata-0" Dec 02 
19:00:33 crc kubenswrapper[4792]: I1202 19:00:33.006411 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c128c153-5ac8-4dd5-9952-4ad8960e6525-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c128c153-5ac8-4dd5-9952-4ad8960e6525\") " pod="openstack/nova-metadata-0" Dec 02 19:00:33 crc kubenswrapper[4792]: I1202 19:00:33.006444 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c128c153-5ac8-4dd5-9952-4ad8960e6525-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c128c153-5ac8-4dd5-9952-4ad8960e6525\") " pod="openstack/nova-metadata-0" Dec 02 19:00:33 crc kubenswrapper[4792]: I1202 19:00:33.006795 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c128c153-5ac8-4dd5-9952-4ad8960e6525-logs\") pod \"nova-metadata-0\" (UID: \"c128c153-5ac8-4dd5-9952-4ad8960e6525\") " pod="openstack/nova-metadata-0" Dec 02 19:00:33 crc kubenswrapper[4792]: I1202 19:00:33.013025 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c128c153-5ac8-4dd5-9952-4ad8960e6525-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c128c153-5ac8-4dd5-9952-4ad8960e6525\") " pod="openstack/nova-metadata-0" Dec 02 19:00:33 crc kubenswrapper[4792]: I1202 19:00:33.013501 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c128c153-5ac8-4dd5-9952-4ad8960e6525-config-data\") pod \"nova-metadata-0\" (UID: \"c128c153-5ac8-4dd5-9952-4ad8960e6525\") " pod="openstack/nova-metadata-0" Dec 02 19:00:33 crc kubenswrapper[4792]: I1202 19:00:33.024424 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c128c153-5ac8-4dd5-9952-4ad8960e6525-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c128c153-5ac8-4dd5-9952-4ad8960e6525\") " pod="openstack/nova-metadata-0" Dec 02 19:00:33 crc kubenswrapper[4792]: I1202 19:00:33.028248 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-htqct\" (UniqueName: \"kubernetes.io/projected/c128c153-5ac8-4dd5-9952-4ad8960e6525-kube-api-access-htqct\") pod \"nova-metadata-0\" (UID: \"c128c153-5ac8-4dd5-9952-4ad8960e6525\") " pod="openstack/nova-metadata-0" Dec 02 19:00:33 crc kubenswrapper[4792]: I1202 19:00:33.054656 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 19:00:33 crc kubenswrapper[4792]: I1202 19:00:33.432409 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="f41d9f2a-9c86-4a7c-95d3-bcf1349690e6" containerName="nova-scheduler-scheduler" containerID="cri-o://d8711fc32e54d6d5146c9b3146614173ebfbc41ceea3d991f309fdb82e867721" gracePeriod=30 Dec 02 19:00:33 crc kubenswrapper[4792]: I1202 19:00:33.550125 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c9372aa0-1bc6-460f-b0d3-959a963670c5" path="/var/lib/kubelet/pods/c9372aa0-1bc6-460f-b0d3-959a963670c5/volumes" Dec 02 19:00:33 crc kubenswrapper[4792]: I1202 19:00:33.551103 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f05e35a9-ee56-406e-a58b-ec43e8c76dcf" path="/var/lib/kubelet/pods/f05e35a9-ee56-406e-a58b-ec43e8c76dcf/volumes" Dec 02 19:00:33 crc kubenswrapper[4792]: I1202 19:00:33.605039 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 19:00:33 crc kubenswrapper[4792]: W1202 19:00:33.634314 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc128c153_5ac8_4dd5_9952_4ad8960e6525.slice/crio-4c54e402de832e5557ce045c6cd5f6f74fb0ed73086eb6704790ddd1d9baecff WatchSource:0}: Error finding container 4c54e402de832e5557ce045c6cd5f6f74fb0ed73086eb6704790ddd1d9baecff: Status 404 returned error can't find the container with id 4c54e402de832e5557ce045c6cd5f6f74fb0ed73086eb6704790ddd1d9baecff Dec 02 19:00:34 crc kubenswrapper[4792]: I1202 19:00:34.443799 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c128c153-5ac8-4dd5-9952-4ad8960e6525","Type":"ContainerStarted","Data":"dbbdaad6e6a279a4b33b36c96d3239a799c7b80079a7f2bda17236e0cf483e37"} Dec 02 19:00:34 crc kubenswrapper[4792]: I1202 19:00:34.444136 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c128c153-5ac8-4dd5-9952-4ad8960e6525","Type":"ContainerStarted","Data":"0297cbe3a5372af31e88d534131030dd719530db04994c19293b49c1290996b2"} Dec 02 19:00:34 crc kubenswrapper[4792]: I1202 19:00:34.444157 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c128c153-5ac8-4dd5-9952-4ad8960e6525","Type":"ContainerStarted","Data":"4c54e402de832e5557ce045c6cd5f6f74fb0ed73086eb6704790ddd1d9baecff"} Dec 02 19:00:34 crc kubenswrapper[4792]: I1202 19:00:34.447311 4792 generic.go:334] "Generic (PLEG): container finished" podID="67c9d25a-3961-4b7d-bad6-340311a55dde" containerID="9dd237969616cdf7de0455b8afae5553788744c495490aaf1e021952c331f415" exitCode=0 Dec 02 19:00:34 crc kubenswrapper[4792]: I1202 19:00:34.447340 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-glqwc" event={"ID":"67c9d25a-3961-4b7d-bad6-340311a55dde","Type":"ContainerDied","Data":"9dd237969616cdf7de0455b8afae5553788744c495490aaf1e021952c331f415"} Dec 02 19:00:34 crc kubenswrapper[4792]: I1202 19:00:34.476010 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.4759896 podStartE2EDuration="2.4759896s" podCreationTimestamp="2025-12-02 19:00:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 19:00:34.464707227 +0000 UTC 
m=+1465.237599555" watchObservedRunningTime="2025-12-02 19:00:34.4759896 +0000 UTC m=+1465.248881918" Dec 02 19:00:35 crc kubenswrapper[4792]: E1202 19:00:35.707665 4792 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d8711fc32e54d6d5146c9b3146614173ebfbc41ceea3d991f309fdb82e867721" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 02 19:00:35 crc kubenswrapper[4792]: E1202 19:00:35.710342 4792 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d8711fc32e54d6d5146c9b3146614173ebfbc41ceea3d991f309fdb82e867721" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 02 19:00:35 crc kubenswrapper[4792]: E1202 19:00:35.717880 4792 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d8711fc32e54d6d5146c9b3146614173ebfbc41ceea3d991f309fdb82e867721" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 02 19:00:35 crc kubenswrapper[4792]: E1202 19:00:35.717965 4792 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="f41d9f2a-9c86-4a7c-95d3-bcf1349690e6" containerName="nova-scheduler-scheduler" Dec 02 19:00:35 crc kubenswrapper[4792]: I1202 19:00:35.991791 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-glqwc" Dec 02 19:00:36 crc kubenswrapper[4792]: I1202 19:00:36.079854 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67c9d25a-3961-4b7d-bad6-340311a55dde-combined-ca-bundle\") pod \"67c9d25a-3961-4b7d-bad6-340311a55dde\" (UID: \"67c9d25a-3961-4b7d-bad6-340311a55dde\") " Dec 02 19:00:36 crc kubenswrapper[4792]: I1202 19:00:36.079909 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qjk8m\" (UniqueName: \"kubernetes.io/projected/67c9d25a-3961-4b7d-bad6-340311a55dde-kube-api-access-qjk8m\") pod \"67c9d25a-3961-4b7d-bad6-340311a55dde\" (UID: \"67c9d25a-3961-4b7d-bad6-340311a55dde\") " Dec 02 19:00:36 crc kubenswrapper[4792]: I1202 19:00:36.080122 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67c9d25a-3961-4b7d-bad6-340311a55dde-scripts\") pod \"67c9d25a-3961-4b7d-bad6-340311a55dde\" (UID: \"67c9d25a-3961-4b7d-bad6-340311a55dde\") " Dec 02 19:00:36 crc kubenswrapper[4792]: I1202 19:00:36.080170 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67c9d25a-3961-4b7d-bad6-340311a55dde-config-data\") pod \"67c9d25a-3961-4b7d-bad6-340311a55dde\" (UID: \"67c9d25a-3961-4b7d-bad6-340311a55dde\") " Dec 02 19:00:36 crc kubenswrapper[4792]: I1202 19:00:36.085720 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67c9d25a-3961-4b7d-bad6-340311a55dde-scripts" (OuterVolumeSpecName: "scripts") pod "67c9d25a-3961-4b7d-bad6-340311a55dde" (UID: 
"67c9d25a-3961-4b7d-bad6-340311a55dde"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:00:36 crc kubenswrapper[4792]: I1202 19:00:36.086065 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67c9d25a-3961-4b7d-bad6-340311a55dde-kube-api-access-qjk8m" (OuterVolumeSpecName: "kube-api-access-qjk8m") pod "67c9d25a-3961-4b7d-bad6-340311a55dde" (UID: "67c9d25a-3961-4b7d-bad6-340311a55dde"). InnerVolumeSpecName "kube-api-access-qjk8m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:00:36 crc kubenswrapper[4792]: I1202 19:00:36.110228 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67c9d25a-3961-4b7d-bad6-340311a55dde-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "67c9d25a-3961-4b7d-bad6-340311a55dde" (UID: "67c9d25a-3961-4b7d-bad6-340311a55dde"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:00:36 crc kubenswrapper[4792]: I1202 19:00:36.112946 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67c9d25a-3961-4b7d-bad6-340311a55dde-config-data" (OuterVolumeSpecName: "config-data") pod "67c9d25a-3961-4b7d-bad6-340311a55dde" (UID: "67c9d25a-3961-4b7d-bad6-340311a55dde"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:00:36 crc kubenswrapper[4792]: I1202 19:00:36.182052 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67c9d25a-3961-4b7d-bad6-340311a55dde-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:36 crc kubenswrapper[4792]: I1202 19:00:36.182223 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67c9d25a-3961-4b7d-bad6-340311a55dde-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:36 crc kubenswrapper[4792]: I1202 19:00:36.182292 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67c9d25a-3961-4b7d-bad6-340311a55dde-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:36 crc kubenswrapper[4792]: I1202 19:00:36.182355 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qjk8m\" (UniqueName: \"kubernetes.io/projected/67c9d25a-3961-4b7d-bad6-340311a55dde-kube-api-access-qjk8m\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:36 crc kubenswrapper[4792]: I1202 19:00:36.506176 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-glqwc" Dec 02 19:00:36 crc kubenswrapper[4792]: I1202 19:00:36.509724 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-glqwc" event={"ID":"67c9d25a-3961-4b7d-bad6-340311a55dde","Type":"ContainerDied","Data":"59e7f042467af8b6a374137f3bdcac4f549bd4104b755cdb1be321401b509434"} Dec 02 19:00:36 crc kubenswrapper[4792]: I1202 19:00:36.509835 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="59e7f042467af8b6a374137f3bdcac4f549bd4104b755cdb1be321401b509434" Dec 02 19:00:36 crc kubenswrapper[4792]: I1202 19:00:36.585949 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 02 19:00:36 crc kubenswrapper[4792]: E1202 19:00:36.586409 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67c9d25a-3961-4b7d-bad6-340311a55dde" containerName="nova-cell1-conductor-db-sync" Dec 02 19:00:36 crc kubenswrapper[4792]: I1202 19:00:36.586432 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="67c9d25a-3961-4b7d-bad6-340311a55dde" containerName="nova-cell1-conductor-db-sync" Dec 02 19:00:36 crc kubenswrapper[4792]: I1202 19:00:36.587161 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="67c9d25a-3961-4b7d-bad6-340311a55dde" containerName="nova-cell1-conductor-db-sync" Dec 02 19:00:36 crc kubenswrapper[4792]: I1202 19:00:36.587937 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 02 19:00:36 crc kubenswrapper[4792]: I1202 19:00:36.592435 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 02 19:00:36 crc kubenswrapper[4792]: I1202 19:00:36.628666 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 02 19:00:36 crc kubenswrapper[4792]: I1202 19:00:36.698769 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mbwhq\" (UniqueName: \"kubernetes.io/projected/a1884f2e-062d-4a08-aff1-59d9316bfff8-kube-api-access-mbwhq\") pod \"nova-cell1-conductor-0\" (UID: \"a1884f2e-062d-4a08-aff1-59d9316bfff8\") " pod="openstack/nova-cell1-conductor-0" Dec 02 19:00:36 crc kubenswrapper[4792]: I1202 19:00:36.700002 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1884f2e-062d-4a08-aff1-59d9316bfff8-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"a1884f2e-062d-4a08-aff1-59d9316bfff8\") " pod="openstack/nova-cell1-conductor-0" Dec 02 19:00:36 crc kubenswrapper[4792]: I1202 19:00:36.700127 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1884f2e-062d-4a08-aff1-59d9316bfff8-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"a1884f2e-062d-4a08-aff1-59d9316bfff8\") " pod="openstack/nova-cell1-conductor-0" Dec 02 19:00:36 crc kubenswrapper[4792]: I1202 19:00:36.802000 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mbwhq\" (UniqueName: \"kubernetes.io/projected/a1884f2e-062d-4a08-aff1-59d9316bfff8-kube-api-access-mbwhq\") pod \"nova-cell1-conductor-0\" (UID: \"a1884f2e-062d-4a08-aff1-59d9316bfff8\") " pod="openstack/nova-cell1-conductor-0" Dec 02 19:00:36 crc kubenswrapper[4792]: I1202 
19:00:36.803489 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1884f2e-062d-4a08-aff1-59d9316bfff8-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"a1884f2e-062d-4a08-aff1-59d9316bfff8\") " pod="openstack/nova-cell1-conductor-0" Dec 02 19:00:36 crc kubenswrapper[4792]: I1202 19:00:36.804380 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1884f2e-062d-4a08-aff1-59d9316bfff8-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"a1884f2e-062d-4a08-aff1-59d9316bfff8\") " pod="openstack/nova-cell1-conductor-0" Dec 02 19:00:36 crc kubenswrapper[4792]: I1202 19:00:36.813767 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1884f2e-062d-4a08-aff1-59d9316bfff8-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"a1884f2e-062d-4a08-aff1-59d9316bfff8\") " pod="openstack/nova-cell1-conductor-0" Dec 02 19:00:36 crc kubenswrapper[4792]: I1202 19:00:36.813936 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1884f2e-062d-4a08-aff1-59d9316bfff8-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"a1884f2e-062d-4a08-aff1-59d9316bfff8\") " pod="openstack/nova-cell1-conductor-0" Dec 02 19:00:36 crc kubenswrapper[4792]: I1202 19:00:36.823566 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mbwhq\" (UniqueName: \"kubernetes.io/projected/a1884f2e-062d-4a08-aff1-59d9316bfff8-kube-api-access-mbwhq\") pod \"nova-cell1-conductor-0\" (UID: \"a1884f2e-062d-4a08-aff1-59d9316bfff8\") " pod="openstack/nova-cell1-conductor-0" Dec 02 19:00:36 crc kubenswrapper[4792]: I1202 19:00:36.908930 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 02 19:00:37 crc kubenswrapper[4792]: I1202 19:00:37.291126 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 02 19:00:37 crc kubenswrapper[4792]: I1202 19:00:37.415460 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wt7rj\" (UniqueName: \"kubernetes.io/projected/f41d9f2a-9c86-4a7c-95d3-bcf1349690e6-kube-api-access-wt7rj\") pod \"f41d9f2a-9c86-4a7c-95d3-bcf1349690e6\" (UID: \"f41d9f2a-9c86-4a7c-95d3-bcf1349690e6\") " Dec 02 19:00:37 crc kubenswrapper[4792]: I1202 19:00:37.415511 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f41d9f2a-9c86-4a7c-95d3-bcf1349690e6-config-data\") pod \"f41d9f2a-9c86-4a7c-95d3-bcf1349690e6\" (UID: \"f41d9f2a-9c86-4a7c-95d3-bcf1349690e6\") " Dec 02 19:00:37 crc kubenswrapper[4792]: I1202 19:00:37.415650 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f41d9f2a-9c86-4a7c-95d3-bcf1349690e6-combined-ca-bundle\") pod \"f41d9f2a-9c86-4a7c-95d3-bcf1349690e6\" (UID: \"f41d9f2a-9c86-4a7c-95d3-bcf1349690e6\") " Dec 02 19:00:37 crc kubenswrapper[4792]: I1202 19:00:37.437866 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f41d9f2a-9c86-4a7c-95d3-bcf1349690e6-kube-api-access-wt7rj" (OuterVolumeSpecName: "kube-api-access-wt7rj") pod "f41d9f2a-9c86-4a7c-95d3-bcf1349690e6" (UID: "f41d9f2a-9c86-4a7c-95d3-bcf1349690e6"). InnerVolumeSpecName "kube-api-access-wt7rj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:00:37 crc kubenswrapper[4792]: I1202 19:00:37.448104 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 02 19:00:37 crc kubenswrapper[4792]: I1202 19:00:37.484822 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f41d9f2a-9c86-4a7c-95d3-bcf1349690e6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f41d9f2a-9c86-4a7c-95d3-bcf1349690e6" (UID: "f41d9f2a-9c86-4a7c-95d3-bcf1349690e6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:00:37 crc kubenswrapper[4792]: I1202 19:00:37.489685 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f41d9f2a-9c86-4a7c-95d3-bcf1349690e6-config-data" (OuterVolumeSpecName: "config-data") pod "f41d9f2a-9c86-4a7c-95d3-bcf1349690e6" (UID: "f41d9f2a-9c86-4a7c-95d3-bcf1349690e6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:00:37 crc kubenswrapper[4792]: I1202 19:00:37.521815 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wt7rj\" (UniqueName: \"kubernetes.io/projected/f41d9f2a-9c86-4a7c-95d3-bcf1349690e6-kube-api-access-wt7rj\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:37 crc kubenswrapper[4792]: I1202 19:00:37.521899 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f41d9f2a-9c86-4a7c-95d3-bcf1349690e6-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:37 crc kubenswrapper[4792]: I1202 19:00:37.521955 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f41d9f2a-9c86-4a7c-95d3-bcf1349690e6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:37 crc kubenswrapper[4792]: I1202 19:00:37.537225 4792 generic.go:334] "Generic (PLEG): container finished" podID="f41d9f2a-9c86-4a7c-95d3-bcf1349690e6" containerID="d8711fc32e54d6d5146c9b3146614173ebfbc41ceea3d991f309fdb82e867721" exitCode=0 Dec 02 19:00:37 crc kubenswrapper[4792]: I1202 19:00:37.537286 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f41d9f2a-9c86-4a7c-95d3-bcf1349690e6","Type":"ContainerDied","Data":"d8711fc32e54d6d5146c9b3146614173ebfbc41ceea3d991f309fdb82e867721"} Dec 02 19:00:37 crc kubenswrapper[4792]: I1202 19:00:37.537313 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f41d9f2a-9c86-4a7c-95d3-bcf1349690e6","Type":"ContainerDied","Data":"e400ad10f3e6b6f5dab242b9c970f7a1b8e4c581c7f8f22677676a8e836a345f"} Dec 02 19:00:37 crc kubenswrapper[4792]: I1202 19:00:37.537331 4792 scope.go:117] "RemoveContainer" containerID="d8711fc32e54d6d5146c9b3146614173ebfbc41ceea3d991f309fdb82e867721" Dec 02 19:00:37 crc kubenswrapper[4792]: I1202 19:00:37.537450 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 02 19:00:37 crc kubenswrapper[4792]: I1202 19:00:37.551300 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"a1884f2e-062d-4a08-aff1-59d9316bfff8","Type":"ContainerStarted","Data":"e9b8d876ff6d773435ab45ac6e49fe9b347af5047ebcdfbf8378b62227d4f31c"} Dec 02 19:00:37 crc kubenswrapper[4792]: I1202 19:00:37.585974 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 19:00:37 crc kubenswrapper[4792]: I1202 19:00:37.586151 4792 scope.go:117] "RemoveContainer" containerID="d8711fc32e54d6d5146c9b3146614173ebfbc41ceea3d991f309fdb82e867721" Dec 02 19:00:37 crc kubenswrapper[4792]: E1202 19:00:37.588438 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d8711fc32e54d6d5146c9b3146614173ebfbc41ceea3d991f309fdb82e867721\": container with ID starting with d8711fc32e54d6d5146c9b3146614173ebfbc41ceea3d991f309fdb82e867721 not found: ID does not exist" containerID="d8711fc32e54d6d5146c9b3146614173ebfbc41ceea3d991f309fdb82e867721" Dec 02 19:00:37 crc kubenswrapper[4792]: I1202 19:00:37.588488 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8711fc32e54d6d5146c9b3146614173ebfbc41ceea3d991f309fdb82e867721"} err="failed to get container status \"d8711fc32e54d6d5146c9b3146614173ebfbc41ceea3d991f309fdb82e867721\": rpc error: code = NotFound desc = could not find container \"d8711fc32e54d6d5146c9b3146614173ebfbc41ceea3d991f309fdb82e867721\": container with ID starting with d8711fc32e54d6d5146c9b3146614173ebfbc41ceea3d991f309fdb82e867721 not found: ID does not exist" Dec 02 19:00:37 crc kubenswrapper[4792]: I1202 19:00:37.601608 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 19:00:37 crc kubenswrapper[4792]: I1202 19:00:37.614931 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 19:00:37 crc kubenswrapper[4792]: E1202 19:00:37.615412 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f41d9f2a-9c86-4a7c-95d3-bcf1349690e6" containerName="nova-scheduler-scheduler" Dec 02 19:00:37 crc kubenswrapper[4792]: I1202 19:00:37.615429 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="f41d9f2a-9c86-4a7c-95d3-bcf1349690e6" containerName="nova-scheduler-scheduler" Dec 02 19:00:37 crc kubenswrapper[4792]: I1202 19:00:37.615694 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="f41d9f2a-9c86-4a7c-95d3-bcf1349690e6" containerName="nova-scheduler-scheduler" Dec 02 19:00:37 crc kubenswrapper[4792]: I1202 19:00:37.616428 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 02 19:00:37 crc kubenswrapper[4792]: I1202 19:00:37.622105 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 02 19:00:37 crc kubenswrapper[4792]: I1202 19:00:37.630424 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 19:00:37 crc kubenswrapper[4792]: I1202 19:00:37.725290 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89ec05ac-1237-4cfd-a36e-59bc6933890e-config-data\") pod \"nova-scheduler-0\" (UID: \"89ec05ac-1237-4cfd-a36e-59bc6933890e\") " pod="openstack/nova-scheduler-0" Dec 02 19:00:37 crc kubenswrapper[4792]: I1202 19:00:37.725405 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89ec05ac-1237-4cfd-a36e-59bc6933890e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"89ec05ac-1237-4cfd-a36e-59bc6933890e\") " pod="openstack/nova-scheduler-0" Dec 02 19:00:37 crc kubenswrapper[4792]: I1202 19:00:37.725490 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6q4c\" (UniqueName: \"kubernetes.io/projected/89ec05ac-1237-4cfd-a36e-59bc6933890e-kube-api-access-f6q4c\") pod \"nova-scheduler-0\" (UID: \"89ec05ac-1237-4cfd-a36e-59bc6933890e\") " pod="openstack/nova-scheduler-0" Dec 02 19:00:37 crc kubenswrapper[4792]: I1202 19:00:37.827846 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89ec05ac-1237-4cfd-a36e-59bc6933890e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"89ec05ac-1237-4cfd-a36e-59bc6933890e\") " pod="openstack/nova-scheduler-0" Dec 02 19:00:37 crc kubenswrapper[4792]: I1202 19:00:37.827940 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6q4c\" (UniqueName: \"kubernetes.io/projected/89ec05ac-1237-4cfd-a36e-59bc6933890e-kube-api-access-f6q4c\") pod \"nova-scheduler-0\" (UID: \"89ec05ac-1237-4cfd-a36e-59bc6933890e\") " pod="openstack/nova-scheduler-0" Dec 02 19:00:37 crc kubenswrapper[4792]: I1202 19:00:37.828049 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89ec05ac-1237-4cfd-a36e-59bc6933890e-config-data\") pod \"nova-scheduler-0\" (UID: \"89ec05ac-1237-4cfd-a36e-59bc6933890e\") " pod="openstack/nova-scheduler-0" Dec 02 19:00:37 crc kubenswrapper[4792]: I1202 19:00:37.832283 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89ec05ac-1237-4cfd-a36e-59bc6933890e-config-data\") pod \"nova-scheduler-0\" (UID: \"89ec05ac-1237-4cfd-a36e-59bc6933890e\") " pod="openstack/nova-scheduler-0" Dec 02 19:00:37 crc kubenswrapper[4792]: I1202 19:00:37.832731 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89ec05ac-1237-4cfd-a36e-59bc6933890e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"89ec05ac-1237-4cfd-a36e-59bc6933890e\") " pod="openstack/nova-scheduler-0" Dec 02 19:00:37 crc kubenswrapper[4792]: I1202 19:00:37.850395 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6q4c\" (UniqueName: 
\"kubernetes.io/projected/89ec05ac-1237-4cfd-a36e-59bc6933890e-kube-api-access-f6q4c\") pod \"nova-scheduler-0\" (UID: \"89ec05ac-1237-4cfd-a36e-59bc6933890e\") " pod="openstack/nova-scheduler-0" Dec 02 19:00:37 crc kubenswrapper[4792]: I1202 19:00:37.933512 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 02 19:00:38 crc kubenswrapper[4792]: I1202 19:00:38.058235 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 02 19:00:38 crc kubenswrapper[4792]: I1202 19:00:38.058465 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 02 19:00:38 crc kubenswrapper[4792]: I1202 19:00:38.444034 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 19:00:38 crc kubenswrapper[4792]: W1202 19:00:38.445636 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod89ec05ac_1237_4cfd_a36e_59bc6933890e.slice/crio-022f3a17a3a4d4df8d301a55ad40e3a37e5d1364f5d5e1b21bc584f046278069 WatchSource:0}: Error finding container 022f3a17a3a4d4df8d301a55ad40e3a37e5d1364f5d5e1b21bc584f046278069: Status 404 returned error can't find the container with id 022f3a17a3a4d4df8d301a55ad40e3a37e5d1364f5d5e1b21bc584f046278069 Dec 02 19:00:38 crc kubenswrapper[4792]: I1202 19:00:38.526972 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 02 19:00:38 crc kubenswrapper[4792]: I1202 19:00:38.581750 4792 generic.go:334] "Generic (PLEG): container finished" podID="6194d48a-051d-4700-ab26-11280eb387f3" containerID="e3a308d998f1339d73d29d80c40d6fab0707ed42cbffb57a0bc75748a776e7ab" exitCode=0 Dec 02 19:00:38 crc kubenswrapper[4792]: I1202 19:00:38.581814 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 02 19:00:38 crc kubenswrapper[4792]: I1202 19:00:38.581822 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6194d48a-051d-4700-ab26-11280eb387f3","Type":"ContainerDied","Data":"e3a308d998f1339d73d29d80c40d6fab0707ed42cbffb57a0bc75748a776e7ab"} Dec 02 19:00:38 crc kubenswrapper[4792]: I1202 19:00:38.581952 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6194d48a-051d-4700-ab26-11280eb387f3","Type":"ContainerDied","Data":"a5478053b7757aa402a7528b8186da4ba23ff40bae7dac562d743fe1bca6c749"} Dec 02 19:00:38 crc kubenswrapper[4792]: I1202 19:00:38.581975 4792 scope.go:117] "RemoveContainer" containerID="e3a308d998f1339d73d29d80c40d6fab0707ed42cbffb57a0bc75748a776e7ab" Dec 02 19:00:38 crc kubenswrapper[4792]: I1202 19:00:38.586765 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"89ec05ac-1237-4cfd-a36e-59bc6933890e","Type":"ContainerStarted","Data":"022f3a17a3a4d4df8d301a55ad40e3a37e5d1364f5d5e1b21bc584f046278069"} Dec 02 19:00:38 crc kubenswrapper[4792]: I1202 19:00:38.595049 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"a1884f2e-062d-4a08-aff1-59d9316bfff8","Type":"ContainerStarted","Data":"4157a83d11518aaded7fd70f25f5132c6814ee4eb984eafea7bf993f70351782"} Dec 02 19:00:38 crc kubenswrapper[4792]: I1202 19:00:38.595836 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Dec 02 19:00:38 crc kubenswrapper[4792]: I1202 19:00:38.619552 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.619533657 podStartE2EDuration="2.619533657s" podCreationTimestamp="2025-12-02 19:00:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 19:00:38.606276313 +0000 UTC m=+1469.379168651" watchObservedRunningTime="2025-12-02 19:00:38.619533657 +0000 UTC m=+1469.392425985" Dec 02 19:00:38 crc kubenswrapper[4792]: I1202 19:00:38.624609 4792 scope.go:117] "RemoveContainer" containerID="51c43252594109d84fc6199823d9e46620e01716bc34745d24ed9a01874945fc" Dec 02 19:00:38 crc kubenswrapper[4792]: I1202 19:00:38.644825 4792 scope.go:117] "RemoveContainer" containerID="e3a308d998f1339d73d29d80c40d6fab0707ed42cbffb57a0bc75748a776e7ab" Dec 02 19:00:38 crc kubenswrapper[4792]: E1202 19:00:38.645515 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e3a308d998f1339d73d29d80c40d6fab0707ed42cbffb57a0bc75748a776e7ab\": container with ID starting with e3a308d998f1339d73d29d80c40d6fab0707ed42cbffb57a0bc75748a776e7ab not found: ID does not exist" containerID="e3a308d998f1339d73d29d80c40d6fab0707ed42cbffb57a0bc75748a776e7ab" Dec 02 19:00:38 crc kubenswrapper[4792]: I1202 19:00:38.645617 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e3a308d998f1339d73d29d80c40d6fab0707ed42cbffb57a0bc75748a776e7ab"} err="failed to get container status \"e3a308d998f1339d73d29d80c40d6fab0707ed42cbffb57a0bc75748a776e7ab\": rpc error: code = NotFound desc = could not find container \"e3a308d998f1339d73d29d80c40d6fab0707ed42cbffb57a0bc75748a776e7ab\": container with ID starting with e3a308d998f1339d73d29d80c40d6fab0707ed42cbffb57a0bc75748a776e7ab 
not found: ID does not exist" Dec 02 19:00:38 crc kubenswrapper[4792]: I1202 19:00:38.645693 4792 scope.go:117] "RemoveContainer" containerID="51c43252594109d84fc6199823d9e46620e01716bc34745d24ed9a01874945fc" Dec 02 19:00:38 crc kubenswrapper[4792]: E1202 19:00:38.646022 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51c43252594109d84fc6199823d9e46620e01716bc34745d24ed9a01874945fc\": container with ID starting with 51c43252594109d84fc6199823d9e46620e01716bc34745d24ed9a01874945fc not found: ID does not exist" containerID="51c43252594109d84fc6199823d9e46620e01716bc34745d24ed9a01874945fc" Dec 02 19:00:38 crc kubenswrapper[4792]: I1202 19:00:38.646111 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51c43252594109d84fc6199823d9e46620e01716bc34745d24ed9a01874945fc"} err="failed to get container status \"51c43252594109d84fc6199823d9e46620e01716bc34745d24ed9a01874945fc\": rpc error: code = NotFound desc = could not find container \"51c43252594109d84fc6199823d9e46620e01716bc34745d24ed9a01874945fc\": container with ID starting with 51c43252594109d84fc6199823d9e46620e01716bc34745d24ed9a01874945fc not found: ID does not exist" Dec 02 19:00:38 crc kubenswrapper[4792]: I1202 19:00:38.648900 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rgx8h\" (UniqueName: \"kubernetes.io/projected/6194d48a-051d-4700-ab26-11280eb387f3-kube-api-access-rgx8h\") pod \"6194d48a-051d-4700-ab26-11280eb387f3\" (UID: \"6194d48a-051d-4700-ab26-11280eb387f3\") " Dec 02 19:00:38 crc kubenswrapper[4792]: I1202 19:00:38.649116 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6194d48a-051d-4700-ab26-11280eb387f3-logs\") pod \"6194d48a-051d-4700-ab26-11280eb387f3\" (UID: \"6194d48a-051d-4700-ab26-11280eb387f3\") " Dec 02 19:00:38 crc kubenswrapper[4792]: I1202 19:00:38.649386 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6194d48a-051d-4700-ab26-11280eb387f3-combined-ca-bundle\") pod \"6194d48a-051d-4700-ab26-11280eb387f3\" (UID: \"6194d48a-051d-4700-ab26-11280eb387f3\") " Dec 02 19:00:38 crc kubenswrapper[4792]: I1202 19:00:38.649479 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6194d48a-051d-4700-ab26-11280eb387f3-config-data\") pod \"6194d48a-051d-4700-ab26-11280eb387f3\" (UID: \"6194d48a-051d-4700-ab26-11280eb387f3\") " Dec 02 19:00:38 crc kubenswrapper[4792]: I1202 19:00:38.651699 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6194d48a-051d-4700-ab26-11280eb387f3-logs" (OuterVolumeSpecName: "logs") pod "6194d48a-051d-4700-ab26-11280eb387f3" (UID: "6194d48a-051d-4700-ab26-11280eb387f3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:00:38 crc kubenswrapper[4792]: I1202 19:00:38.656268 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6194d48a-051d-4700-ab26-11280eb387f3-kube-api-access-rgx8h" (OuterVolumeSpecName: "kube-api-access-rgx8h") pod "6194d48a-051d-4700-ab26-11280eb387f3" (UID: "6194d48a-051d-4700-ab26-11280eb387f3"). InnerVolumeSpecName "kube-api-access-rgx8h". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:00:38 crc kubenswrapper[4792]: I1202 19:00:38.683148 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6194d48a-051d-4700-ab26-11280eb387f3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6194d48a-051d-4700-ab26-11280eb387f3" (UID: "6194d48a-051d-4700-ab26-11280eb387f3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:00:38 crc kubenswrapper[4792]: I1202 19:00:38.690774 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6194d48a-051d-4700-ab26-11280eb387f3-config-data" (OuterVolumeSpecName: "config-data") pod "6194d48a-051d-4700-ab26-11280eb387f3" (UID: "6194d48a-051d-4700-ab26-11280eb387f3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:00:38 crc kubenswrapper[4792]: I1202 19:00:38.752415 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rgx8h\" (UniqueName: \"kubernetes.io/projected/6194d48a-051d-4700-ab26-11280eb387f3-kube-api-access-rgx8h\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:38 crc kubenswrapper[4792]: I1202 19:00:38.752447 4792 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6194d48a-051d-4700-ab26-11280eb387f3-logs\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:38 crc kubenswrapper[4792]: I1202 19:00:38.752456 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6194d48a-051d-4700-ab26-11280eb387f3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:38 crc kubenswrapper[4792]: I1202 19:00:38.752466 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6194d48a-051d-4700-ab26-11280eb387f3-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:38 crc kubenswrapper[4792]: I1202 19:00:38.951984 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 02 19:00:38 crc kubenswrapper[4792]: I1202 19:00:38.959749 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 02 19:00:39 crc kubenswrapper[4792]: I1202 19:00:39.015245 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 02 19:00:39 crc kubenswrapper[4792]: E1202 19:00:39.015971 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6194d48a-051d-4700-ab26-11280eb387f3" containerName="nova-api-api" Dec 02 19:00:39 crc kubenswrapper[4792]: I1202 19:00:39.015985 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="6194d48a-051d-4700-ab26-11280eb387f3" containerName="nova-api-api" Dec 02 19:00:39 crc kubenswrapper[4792]: E1202 19:00:39.016017 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6194d48a-051d-4700-ab26-11280eb387f3" containerName="nova-api-log" Dec 02 19:00:39 crc kubenswrapper[4792]: I1202 19:00:39.016023 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="6194d48a-051d-4700-ab26-11280eb387f3" containerName="nova-api-log" Dec 02 19:00:39 crc kubenswrapper[4792]: I1202 19:00:39.016854 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="6194d48a-051d-4700-ab26-11280eb387f3" containerName="nova-api-log" Dec 02 19:00:39 crc kubenswrapper[4792]: I1202 19:00:39.016873 4792 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="6194d48a-051d-4700-ab26-11280eb387f3" containerName="nova-api-api" Dec 02 19:00:39 crc kubenswrapper[4792]: I1202 19:00:39.019880 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 02 19:00:39 crc kubenswrapper[4792]: I1202 19:00:39.024617 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 02 19:00:39 crc kubenswrapper[4792]: I1202 19:00:39.045104 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 02 19:00:39 crc kubenswrapper[4792]: I1202 19:00:39.160268 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/794f62c5-7842-4616-aa8e-c968e23bc67d-config-data\") pod \"nova-api-0\" (UID: \"794f62c5-7842-4616-aa8e-c968e23bc67d\") " pod="openstack/nova-api-0" Dec 02 19:00:39 crc kubenswrapper[4792]: I1202 19:00:39.160388 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6mqk6\" (UniqueName: \"kubernetes.io/projected/794f62c5-7842-4616-aa8e-c968e23bc67d-kube-api-access-6mqk6\") pod \"nova-api-0\" (UID: \"794f62c5-7842-4616-aa8e-c968e23bc67d\") " pod="openstack/nova-api-0" Dec 02 19:00:39 crc kubenswrapper[4792]: I1202 19:00:39.160418 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/794f62c5-7842-4616-aa8e-c968e23bc67d-logs\") pod \"nova-api-0\" (UID: \"794f62c5-7842-4616-aa8e-c968e23bc67d\") " pod="openstack/nova-api-0" Dec 02 19:00:39 crc kubenswrapper[4792]: I1202 19:00:39.160441 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/794f62c5-7842-4616-aa8e-c968e23bc67d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"794f62c5-7842-4616-aa8e-c968e23bc67d\") " pod="openstack/nova-api-0" Dec 02 19:00:39 crc kubenswrapper[4792]: I1202 19:00:39.262698 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/794f62c5-7842-4616-aa8e-c968e23bc67d-config-data\") pod \"nova-api-0\" (UID: \"794f62c5-7842-4616-aa8e-c968e23bc67d\") " pod="openstack/nova-api-0" Dec 02 19:00:39 crc kubenswrapper[4792]: I1202 19:00:39.263323 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6mqk6\" (UniqueName: \"kubernetes.io/projected/794f62c5-7842-4616-aa8e-c968e23bc67d-kube-api-access-6mqk6\") pod \"nova-api-0\" (UID: \"794f62c5-7842-4616-aa8e-c968e23bc67d\") " pod="openstack/nova-api-0" Dec 02 19:00:39 crc kubenswrapper[4792]: I1202 19:00:39.263801 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/794f62c5-7842-4616-aa8e-c968e23bc67d-logs\") pod \"nova-api-0\" (UID: \"794f62c5-7842-4616-aa8e-c968e23bc67d\") " pod="openstack/nova-api-0" Dec 02 19:00:39 crc kubenswrapper[4792]: I1202 19:00:39.263868 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/794f62c5-7842-4616-aa8e-c968e23bc67d-logs\") pod \"nova-api-0\" (UID: \"794f62c5-7842-4616-aa8e-c968e23bc67d\") " pod="openstack/nova-api-0" Dec 02 19:00:39 crc kubenswrapper[4792]: I1202 19:00:39.263899 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/794f62c5-7842-4616-aa8e-c968e23bc67d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"794f62c5-7842-4616-aa8e-c968e23bc67d\") " pod="openstack/nova-api-0" Dec 02 19:00:39 crc kubenswrapper[4792]: I1202 19:00:39.277461 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/794f62c5-7842-4616-aa8e-c968e23bc67d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"794f62c5-7842-4616-aa8e-c968e23bc67d\") " pod="openstack/nova-api-0" Dec 02 19:00:39 crc kubenswrapper[4792]: I1202 19:00:39.280869 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/794f62c5-7842-4616-aa8e-c968e23bc67d-config-data\") pod \"nova-api-0\" (UID: \"794f62c5-7842-4616-aa8e-c968e23bc67d\") " pod="openstack/nova-api-0" Dec 02 19:00:39 crc kubenswrapper[4792]: I1202 19:00:39.295486 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6mqk6\" (UniqueName: \"kubernetes.io/projected/794f62c5-7842-4616-aa8e-c968e23bc67d-kube-api-access-6mqk6\") pod \"nova-api-0\" (UID: \"794f62c5-7842-4616-aa8e-c968e23bc67d\") " pod="openstack/nova-api-0" Dec 02 19:00:39 crc kubenswrapper[4792]: I1202 19:00:39.345212 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 02 19:00:39 crc kubenswrapper[4792]: I1202 19:00:39.561054 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6194d48a-051d-4700-ab26-11280eb387f3" path="/var/lib/kubelet/pods/6194d48a-051d-4700-ab26-11280eb387f3/volumes" Dec 02 19:00:39 crc kubenswrapper[4792]: I1202 19:00:39.565494 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f41d9f2a-9c86-4a7c-95d3-bcf1349690e6" path="/var/lib/kubelet/pods/f41d9f2a-9c86-4a7c-95d3-bcf1349690e6/volumes" Dec 02 19:00:39 crc kubenswrapper[4792]: I1202 19:00:39.610894 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"89ec05ac-1237-4cfd-a36e-59bc6933890e","Type":"ContainerStarted","Data":"79d0389c50e6449e05d91d34bf7774e493d3817a1010ae7a83c05754f8cccf7a"} Dec 02 19:00:39 crc kubenswrapper[4792]: I1202 19:00:39.641313 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.641290331 podStartE2EDuration="2.641290331s" podCreationTimestamp="2025-12-02 19:00:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 19:00:39.627932305 +0000 UTC m=+1470.400824643" watchObservedRunningTime="2025-12-02 19:00:39.641290331 +0000 UTC m=+1470.414182669" Dec 02 19:00:39 crc kubenswrapper[4792]: W1202 19:00:39.850703 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod794f62c5_7842_4616_aa8e_c968e23bc67d.slice/crio-de162f77aefbdc6bf9a16f6d1c249d102f0e6796647ab45bba1efb0503a79696 WatchSource:0}: Error finding container de162f77aefbdc6bf9a16f6d1c249d102f0e6796647ab45bba1efb0503a79696: Status 404 returned error can't find the container with id de162f77aefbdc6bf9a16f6d1c249d102f0e6796647ab45bba1efb0503a79696 Dec 02 19:00:39 crc kubenswrapper[4792]: I1202 19:00:39.856956 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 02 19:00:40 crc kubenswrapper[4792]: I1202 19:00:40.624554 4792 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"794f62c5-7842-4616-aa8e-c968e23bc67d","Type":"ContainerStarted","Data":"8c0557f540807fc38260cb813f4ce2cdda3c8403de6ac4671676bb6bb72c69b3"} Dec 02 19:00:40 crc kubenswrapper[4792]: I1202 19:00:40.625056 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"794f62c5-7842-4616-aa8e-c968e23bc67d","Type":"ContainerStarted","Data":"f1d8db753027933f929de26c22a72e4fbd1e779d9a3517053268b5d9ff2b4c45"} Dec 02 19:00:40 crc kubenswrapper[4792]: I1202 19:00:40.625074 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"794f62c5-7842-4616-aa8e-c968e23bc67d","Type":"ContainerStarted","Data":"de162f77aefbdc6bf9a16f6d1c249d102f0e6796647ab45bba1efb0503a79696"} Dec 02 19:00:40 crc kubenswrapper[4792]: I1202 19:00:40.649021 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.6490049940000002 podStartE2EDuration="2.649004994s" podCreationTimestamp="2025-12-02 19:00:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 19:00:40.64229179 +0000 UTC m=+1471.415184118" watchObservedRunningTime="2025-12-02 19:00:40.649004994 +0000 UTC m=+1471.421897322" Dec 02 19:00:42 crc kubenswrapper[4792]: I1202 19:00:42.368778 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 02 19:00:42 crc kubenswrapper[4792]: I1202 19:00:42.934660 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 02 19:00:43 crc kubenswrapper[4792]: I1202 19:00:43.056483 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 02 19:00:43 crc kubenswrapper[4792]: I1202 19:00:43.059958 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 02 19:00:44 crc kubenswrapper[4792]: I1202 19:00:44.067719 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="c128c153-5ac8-4dd5-9952-4ad8960e6525" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.217:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 02 19:00:44 crc kubenswrapper[4792]: I1202 19:00:44.067759 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="c128c153-5ac8-4dd5-9952-4ad8960e6525" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.217:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 02 19:00:46 crc kubenswrapper[4792]: I1202 19:00:46.219224 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 02 19:00:46 crc kubenswrapper[4792]: I1202 19:00:46.219671 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="3a9324ab-6fb7-4057-bf68-e74e0907aa80" containerName="kube-state-metrics" containerID="cri-o://b59d7484f19a224613f05a314b072578911a11c84b176d18185925e3062fdd18" gracePeriod=30 Dec 02 19:00:46 crc kubenswrapper[4792]: I1202 19:00:46.699156 4792 generic.go:334] "Generic (PLEG): container finished" podID="3a9324ab-6fb7-4057-bf68-e74e0907aa80" 
containerID="b59d7484f19a224613f05a314b072578911a11c84b176d18185925e3062fdd18" exitCode=2 Dec 02 19:00:46 crc kubenswrapper[4792]: I1202 19:00:46.699364 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"3a9324ab-6fb7-4057-bf68-e74e0907aa80","Type":"ContainerDied","Data":"b59d7484f19a224613f05a314b072578911a11c84b176d18185925e3062fdd18"} Dec 02 19:00:46 crc kubenswrapper[4792]: I1202 19:00:46.972109 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Dec 02 19:00:47 crc kubenswrapper[4792]: I1202 19:00:47.211296 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 02 19:00:47 crc kubenswrapper[4792]: I1202 19:00:47.251137 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lqrdh\" (UniqueName: \"kubernetes.io/projected/3a9324ab-6fb7-4057-bf68-e74e0907aa80-kube-api-access-lqrdh\") pod \"3a9324ab-6fb7-4057-bf68-e74e0907aa80\" (UID: \"3a9324ab-6fb7-4057-bf68-e74e0907aa80\") " Dec 02 19:00:47 crc kubenswrapper[4792]: I1202 19:00:47.258671 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a9324ab-6fb7-4057-bf68-e74e0907aa80-kube-api-access-lqrdh" (OuterVolumeSpecName: "kube-api-access-lqrdh") pod "3a9324ab-6fb7-4057-bf68-e74e0907aa80" (UID: "3a9324ab-6fb7-4057-bf68-e74e0907aa80"). InnerVolumeSpecName "kube-api-access-lqrdh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:00:47 crc kubenswrapper[4792]: I1202 19:00:47.355504 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lqrdh\" (UniqueName: \"kubernetes.io/projected/3a9324ab-6fb7-4057-bf68-e74e0907aa80-kube-api-access-lqrdh\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:47 crc kubenswrapper[4792]: I1202 19:00:47.712138 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"3a9324ab-6fb7-4057-bf68-e74e0907aa80","Type":"ContainerDied","Data":"6e4623a6ce87d895481d98f1f68512eab5a5bba03faa5351917d106a0595748d"} Dec 02 19:00:47 crc kubenswrapper[4792]: I1202 19:00:47.712191 4792 scope.go:117] "RemoveContainer" containerID="b59d7484f19a224613f05a314b072578911a11c84b176d18185925e3062fdd18" Dec 02 19:00:47 crc kubenswrapper[4792]: I1202 19:00:47.712206 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 02 19:00:47 crc kubenswrapper[4792]: I1202 19:00:47.741561 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 02 19:00:47 crc kubenswrapper[4792]: I1202 19:00:47.756146 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 02 19:00:47 crc kubenswrapper[4792]: I1202 19:00:47.766732 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 02 19:00:47 crc kubenswrapper[4792]: E1202 19:00:47.767312 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a9324ab-6fb7-4057-bf68-e74e0907aa80" containerName="kube-state-metrics" Dec 02 19:00:47 crc kubenswrapper[4792]: I1202 19:00:47.767334 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a9324ab-6fb7-4057-bf68-e74e0907aa80" containerName="kube-state-metrics" Dec 02 19:00:47 crc kubenswrapper[4792]: I1202 19:00:47.767693 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a9324ab-6fb7-4057-bf68-e74e0907aa80" containerName="kube-state-metrics" Dec 02 19:00:47 crc kubenswrapper[4792]: I1202 19:00:47.768734 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 02 19:00:47 crc kubenswrapper[4792]: I1202 19:00:47.771489 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Dec 02 19:00:47 crc kubenswrapper[4792]: I1202 19:00:47.771621 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Dec 02 19:00:47 crc kubenswrapper[4792]: I1202 19:00:47.777488 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 02 19:00:47 crc kubenswrapper[4792]: I1202 19:00:47.868109 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9a3a1a3-54f5-4734-aade-5e64bcad49a4-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"e9a3a1a3-54f5-4734-aade-5e64bcad49a4\") " pod="openstack/kube-state-metrics-0" Dec 02 19:00:47 crc kubenswrapper[4792]: I1202 19:00:47.868207 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9a3a1a3-54f5-4734-aade-5e64bcad49a4-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"e9a3a1a3-54f5-4734-aade-5e64bcad49a4\") " pod="openstack/kube-state-metrics-0" Dec 02 19:00:47 crc kubenswrapper[4792]: I1202 19:00:47.868464 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/e9a3a1a3-54f5-4734-aade-5e64bcad49a4-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"e9a3a1a3-54f5-4734-aade-5e64bcad49a4\") " pod="openstack/kube-state-metrics-0" Dec 02 19:00:47 crc kubenswrapper[4792]: I1202 19:00:47.868584 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dkcrr\" (UniqueName: \"kubernetes.io/projected/e9a3a1a3-54f5-4734-aade-5e64bcad49a4-kube-api-access-dkcrr\") pod \"kube-state-metrics-0\" (UID: \"e9a3a1a3-54f5-4734-aade-5e64bcad49a4\") " pod="openstack/kube-state-metrics-0" Dec 02 19:00:47 crc kubenswrapper[4792]: I1202 19:00:47.934341 4792 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 02 19:00:47 crc kubenswrapper[4792]: I1202 19:00:47.971341 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9a3a1a3-54f5-4734-aade-5e64bcad49a4-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"e9a3a1a3-54f5-4734-aade-5e64bcad49a4\") " pod="openstack/kube-state-metrics-0" Dec 02 19:00:47 crc kubenswrapper[4792]: I1202 19:00:47.971408 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9a3a1a3-54f5-4734-aade-5e64bcad49a4-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"e9a3a1a3-54f5-4734-aade-5e64bcad49a4\") " pod="openstack/kube-state-metrics-0" Dec 02 19:00:47 crc kubenswrapper[4792]: I1202 19:00:47.971478 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/e9a3a1a3-54f5-4734-aade-5e64bcad49a4-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"e9a3a1a3-54f5-4734-aade-5e64bcad49a4\") " pod="openstack/kube-state-metrics-0" Dec 02 19:00:47 crc kubenswrapper[4792]: I1202 19:00:47.971515 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dkcrr\" (UniqueName: \"kubernetes.io/projected/e9a3a1a3-54f5-4734-aade-5e64bcad49a4-kube-api-access-dkcrr\") pod \"kube-state-metrics-0\" (UID: \"e9a3a1a3-54f5-4734-aade-5e64bcad49a4\") " pod="openstack/kube-state-metrics-0" Dec 02 19:00:47 crc kubenswrapper[4792]: I1202 19:00:47.978444 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/e9a3a1a3-54f5-4734-aade-5e64bcad49a4-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"e9a3a1a3-54f5-4734-aade-5e64bcad49a4\") " pod="openstack/kube-state-metrics-0" Dec 02 19:00:47 crc kubenswrapper[4792]: I1202 19:00:47.981641 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9a3a1a3-54f5-4734-aade-5e64bcad49a4-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"e9a3a1a3-54f5-4734-aade-5e64bcad49a4\") " pod="openstack/kube-state-metrics-0" Dec 02 19:00:47 crc kubenswrapper[4792]: I1202 19:00:47.998562 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dkcrr\" (UniqueName: \"kubernetes.io/projected/e9a3a1a3-54f5-4734-aade-5e64bcad49a4-kube-api-access-dkcrr\") pod \"kube-state-metrics-0\" (UID: \"e9a3a1a3-54f5-4734-aade-5e64bcad49a4\") " pod="openstack/kube-state-metrics-0" Dec 02 19:00:47 crc kubenswrapper[4792]: I1202 19:00:47.998868 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9a3a1a3-54f5-4734-aade-5e64bcad49a4-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"e9a3a1a3-54f5-4734-aade-5e64bcad49a4\") " pod="openstack/kube-state-metrics-0" Dec 02 19:00:48 crc kubenswrapper[4792]: I1202 19:00:48.000914 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 02 19:00:48 crc kubenswrapper[4792]: I1202 19:00:48.091410 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 02 19:00:48 crc kubenswrapper[4792]: I1202 19:00:48.299891 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 19:00:48 crc kubenswrapper[4792]: I1202 19:00:48.301360 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3e5f8bd9-4288-4f1b-9af5-aee6e9236011" containerName="proxy-httpd" containerID="cri-o://cf37d2c6f1947ee99004a85750d08ab4604a9271fadad530a025b430987e144c" gracePeriod=30 Dec 02 19:00:48 crc kubenswrapper[4792]: I1202 19:00:48.301628 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3e5f8bd9-4288-4f1b-9af5-aee6e9236011" containerName="sg-core" containerID="cri-o://db82f0bb76c64bdde4546c248e7fc8193594c10420a57d4c7770ec959341fb04" gracePeriod=30 Dec 02 19:00:48 crc kubenswrapper[4792]: I1202 19:00:48.301744 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3e5f8bd9-4288-4f1b-9af5-aee6e9236011" containerName="ceilometer-notification-agent" containerID="cri-o://5a9b1582e15bb65792e7df9a181726aa3fb9f3f8acb6aff76c21e16bc6d385df" gracePeriod=30 Dec 02 19:00:48 crc kubenswrapper[4792]: I1202 19:00:48.301797 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3e5f8bd9-4288-4f1b-9af5-aee6e9236011" containerName="ceilometer-central-agent" containerID="cri-o://a3313d463969b31df8605a6d07db02a35a918d4eba57257fb921e8313dfbb57a" gracePeriod=30 Dec 02 19:00:48 crc kubenswrapper[4792]: I1202 19:00:48.619101 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 02 19:00:48 crc kubenswrapper[4792]: W1202 19:00:48.623171 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode9a3a1a3_54f5_4734_aade_5e64bcad49a4.slice/crio-d37d4502b547b03f417cbc5156085c2683708b731961c9339b34848f78d7e45a WatchSource:0}: Error finding container d37d4502b547b03f417cbc5156085c2683708b731961c9339b34848f78d7e45a: Status 404 returned error can't find the container with id d37d4502b547b03f417cbc5156085c2683708b731961c9339b34848f78d7e45a Dec 02 19:00:48 crc kubenswrapper[4792]: I1202 19:00:48.725370 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"e9a3a1a3-54f5-4734-aade-5e64bcad49a4","Type":"ContainerStarted","Data":"d37d4502b547b03f417cbc5156085c2683708b731961c9339b34848f78d7e45a"} Dec 02 19:00:48 crc kubenswrapper[4792]: I1202 19:00:48.746111 4792 generic.go:334] "Generic (PLEG): container finished" podID="3e5f8bd9-4288-4f1b-9af5-aee6e9236011" containerID="cf37d2c6f1947ee99004a85750d08ab4604a9271fadad530a025b430987e144c" exitCode=0 Dec 02 19:00:48 crc kubenswrapper[4792]: I1202 19:00:48.746150 4792 generic.go:334] "Generic (PLEG): container finished" podID="3e5f8bd9-4288-4f1b-9af5-aee6e9236011" containerID="db82f0bb76c64bdde4546c248e7fc8193594c10420a57d4c7770ec959341fb04" exitCode=2 Dec 02 19:00:48 crc kubenswrapper[4792]: I1202 19:00:48.746187 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3e5f8bd9-4288-4f1b-9af5-aee6e9236011","Type":"ContainerDied","Data":"cf37d2c6f1947ee99004a85750d08ab4604a9271fadad530a025b430987e144c"} Dec 02 19:00:48 crc kubenswrapper[4792]: I1202 19:00:48.746237 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ceilometer-0" event={"ID":"3e5f8bd9-4288-4f1b-9af5-aee6e9236011","Type":"ContainerDied","Data":"db82f0bb76c64bdde4546c248e7fc8193594c10420a57d4c7770ec959341fb04"} Dec 02 19:00:48 crc kubenswrapper[4792]: I1202 19:00:48.792989 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 02 19:00:49 crc kubenswrapper[4792]: I1202 19:00:49.346660 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 02 19:00:49 crc kubenswrapper[4792]: I1202 19:00:49.347089 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 02 19:00:49 crc kubenswrapper[4792]: I1202 19:00:49.561511 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a9324ab-6fb7-4057-bf68-e74e0907aa80" path="/var/lib/kubelet/pods/3a9324ab-6fb7-4057-bf68-e74e0907aa80/volumes" Dec 02 19:00:49 crc kubenswrapper[4792]: I1202 19:00:49.758618 4792 generic.go:334] "Generic (PLEG): container finished" podID="3e5f8bd9-4288-4f1b-9af5-aee6e9236011" containerID="a3313d463969b31df8605a6d07db02a35a918d4eba57257fb921e8313dfbb57a" exitCode=0 Dec 02 19:00:49 crc kubenswrapper[4792]: I1202 19:00:49.758688 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3e5f8bd9-4288-4f1b-9af5-aee6e9236011","Type":"ContainerDied","Data":"a3313d463969b31df8605a6d07db02a35a918d4eba57257fb921e8313dfbb57a"} Dec 02 19:00:49 crc kubenswrapper[4792]: I1202 19:00:49.760145 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"e9a3a1a3-54f5-4734-aade-5e64bcad49a4","Type":"ContainerStarted","Data":"f0ffe22ca9330832d0778f2ff295c6961ccb72fee133e0ff058151745616f4f4"} Dec 02 19:00:49 crc kubenswrapper[4792]: I1202 19:00:49.778679 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.273659514 podStartE2EDuration="2.77865924s" podCreationTimestamp="2025-12-02 19:00:47 +0000 UTC" firstStartedPulling="2025-12-02 19:00:48.62591769 +0000 UTC m=+1479.398810008" lastFinishedPulling="2025-12-02 19:00:49.130917406 +0000 UTC m=+1479.903809734" observedRunningTime="2025-12-02 19:00:49.776212957 +0000 UTC m=+1480.549105285" watchObservedRunningTime="2025-12-02 19:00:49.77865924 +0000 UTC m=+1480.551551568" Dec 02 19:00:50 crc kubenswrapper[4792]: I1202 19:00:50.428689 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="794f62c5-7842-4616-aa8e-c968e23bc67d" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.220:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 02 19:00:50 crc kubenswrapper[4792]: I1202 19:00:50.428705 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="794f62c5-7842-4616-aa8e-c968e23bc67d" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.220:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 02 19:00:50 crc kubenswrapper[4792]: I1202 19:00:50.769908 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 02 19:00:53 crc kubenswrapper[4792]: I1202 19:00:53.061371 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 02 19:00:53 crc kubenswrapper[4792]: I1202 19:00:53.062826 4792 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 02 19:00:53 crc kubenswrapper[4792]: I1202 19:00:53.077746 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 02 19:00:53 crc kubenswrapper[4792]: I1202 19:00:53.802046 4792 generic.go:334] "Generic (PLEG): container finished" podID="3e5f8bd9-4288-4f1b-9af5-aee6e9236011" containerID="5a9b1582e15bb65792e7df9a181726aa3fb9f3f8acb6aff76c21e16bc6d385df" exitCode=0 Dec 02 19:00:53 crc kubenswrapper[4792]: I1202 19:00:53.802122 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3e5f8bd9-4288-4f1b-9af5-aee6e9236011","Type":"ContainerDied","Data":"5a9b1582e15bb65792e7df9a181726aa3fb9f3f8acb6aff76c21e16bc6d385df"} Dec 02 19:00:53 crc kubenswrapper[4792]: I1202 19:00:53.802431 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3e5f8bd9-4288-4f1b-9af5-aee6e9236011","Type":"ContainerDied","Data":"8e25af628c01feef64ff3b37b4fff1f72daf154ba26d63389756e75747b590df"} Dec 02 19:00:53 crc kubenswrapper[4792]: I1202 19:00:53.802444 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8e25af628c01feef64ff3b37b4fff1f72daf154ba26d63389756e75747b590df" Dec 02 19:00:53 crc kubenswrapper[4792]: I1202 19:00:53.812357 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 02 19:00:53 crc kubenswrapper[4792]: I1202 19:00:53.814122 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 19:00:53 crc kubenswrapper[4792]: I1202 19:00:53.901957 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e5f8bd9-4288-4f1b-9af5-aee6e9236011-config-data\") pod \"3e5f8bd9-4288-4f1b-9af5-aee6e9236011\" (UID: \"3e5f8bd9-4288-4f1b-9af5-aee6e9236011\") " Dec 02 19:00:53 crc kubenswrapper[4792]: I1202 19:00:53.902036 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3e5f8bd9-4288-4f1b-9af5-aee6e9236011-log-httpd\") pod \"3e5f8bd9-4288-4f1b-9af5-aee6e9236011\" (UID: \"3e5f8bd9-4288-4f1b-9af5-aee6e9236011\") " Dec 02 19:00:53 crc kubenswrapper[4792]: I1202 19:00:53.902085 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e5f8bd9-4288-4f1b-9af5-aee6e9236011-combined-ca-bundle\") pod \"3e5f8bd9-4288-4f1b-9af5-aee6e9236011\" (UID: \"3e5f8bd9-4288-4f1b-9af5-aee6e9236011\") " Dec 02 19:00:53 crc kubenswrapper[4792]: I1202 19:00:53.902117 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3e5f8bd9-4288-4f1b-9af5-aee6e9236011-run-httpd\") pod \"3e5f8bd9-4288-4f1b-9af5-aee6e9236011\" (UID: \"3e5f8bd9-4288-4f1b-9af5-aee6e9236011\") " Dec 02 19:00:53 crc kubenswrapper[4792]: I1202 19:00:53.902206 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sgn9p\" (UniqueName: \"kubernetes.io/projected/3e5f8bd9-4288-4f1b-9af5-aee6e9236011-kube-api-access-sgn9p\") pod \"3e5f8bd9-4288-4f1b-9af5-aee6e9236011\" (UID: \"3e5f8bd9-4288-4f1b-9af5-aee6e9236011\") " Dec 02 19:00:53 crc kubenswrapper[4792]: I1202 19:00:53.902327 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3e5f8bd9-4288-4f1b-9af5-aee6e9236011-sg-core-conf-yaml\") pod \"3e5f8bd9-4288-4f1b-9af5-aee6e9236011\" (UID: \"3e5f8bd9-4288-4f1b-9af5-aee6e9236011\") " Dec 02 19:00:53 crc kubenswrapper[4792]: I1202 19:00:53.902349 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3e5f8bd9-4288-4f1b-9af5-aee6e9236011-scripts\") pod \"3e5f8bd9-4288-4f1b-9af5-aee6e9236011\" (UID: \"3e5f8bd9-4288-4f1b-9af5-aee6e9236011\") " Dec 02 19:00:53 crc kubenswrapper[4792]: I1202 19:00:53.902867 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3e5f8bd9-4288-4f1b-9af5-aee6e9236011-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "3e5f8bd9-4288-4f1b-9af5-aee6e9236011" (UID: "3e5f8bd9-4288-4f1b-9af5-aee6e9236011"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:00:53 crc kubenswrapper[4792]: I1202 19:00:53.903910 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3e5f8bd9-4288-4f1b-9af5-aee6e9236011-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "3e5f8bd9-4288-4f1b-9af5-aee6e9236011" (UID: "3e5f8bd9-4288-4f1b-9af5-aee6e9236011"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:00:53 crc kubenswrapper[4792]: I1202 19:00:53.908997 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e5f8bd9-4288-4f1b-9af5-aee6e9236011-kube-api-access-sgn9p" (OuterVolumeSpecName: "kube-api-access-sgn9p") pod "3e5f8bd9-4288-4f1b-9af5-aee6e9236011" (UID: "3e5f8bd9-4288-4f1b-9af5-aee6e9236011"). InnerVolumeSpecName "kube-api-access-sgn9p". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:00:53 crc kubenswrapper[4792]: I1202 19:00:53.909103 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e5f8bd9-4288-4f1b-9af5-aee6e9236011-scripts" (OuterVolumeSpecName: "scripts") pod "3e5f8bd9-4288-4f1b-9af5-aee6e9236011" (UID: "3e5f8bd9-4288-4f1b-9af5-aee6e9236011"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:00:53 crc kubenswrapper[4792]: I1202 19:00:53.962156 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e5f8bd9-4288-4f1b-9af5-aee6e9236011-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "3e5f8bd9-4288-4f1b-9af5-aee6e9236011" (UID: "3e5f8bd9-4288-4f1b-9af5-aee6e9236011"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:00:54 crc kubenswrapper[4792]: I1202 19:00:54.006482 4792 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3e5f8bd9-4288-4f1b-9af5-aee6e9236011-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:54 crc kubenswrapper[4792]: I1202 19:00:54.006540 4792 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3e5f8bd9-4288-4f1b-9af5-aee6e9236011-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:54 crc kubenswrapper[4792]: I1202 19:00:54.006554 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sgn9p\" (UniqueName: \"kubernetes.io/projected/3e5f8bd9-4288-4f1b-9af5-aee6e9236011-kube-api-access-sgn9p\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:54 crc kubenswrapper[4792]: I1202 19:00:54.006566 4792 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3e5f8bd9-4288-4f1b-9af5-aee6e9236011-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:54 crc kubenswrapper[4792]: I1202 19:00:54.006578 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3e5f8bd9-4288-4f1b-9af5-aee6e9236011-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:54 crc kubenswrapper[4792]: I1202 19:00:54.024427 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e5f8bd9-4288-4f1b-9af5-aee6e9236011-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3e5f8bd9-4288-4f1b-9af5-aee6e9236011" (UID: "3e5f8bd9-4288-4f1b-9af5-aee6e9236011"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:00:54 crc kubenswrapper[4792]: I1202 19:00:54.033899 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e5f8bd9-4288-4f1b-9af5-aee6e9236011-config-data" (OuterVolumeSpecName: "config-data") pod "3e5f8bd9-4288-4f1b-9af5-aee6e9236011" (UID: "3e5f8bd9-4288-4f1b-9af5-aee6e9236011"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:00:54 crc kubenswrapper[4792]: I1202 19:00:54.108968 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e5f8bd9-4288-4f1b-9af5-aee6e9236011-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:54 crc kubenswrapper[4792]: I1202 19:00:54.109000 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e5f8bd9-4288-4f1b-9af5-aee6e9236011-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:54 crc kubenswrapper[4792]: I1202 19:00:54.814316 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 19:00:54 crc kubenswrapper[4792]: I1202 19:00:54.885404 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 19:00:54 crc kubenswrapper[4792]: I1202 19:00:54.898762 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 02 19:00:54 crc kubenswrapper[4792]: I1202 19:00:54.911897 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 02 19:00:54 crc kubenswrapper[4792]: E1202 19:00:54.912459 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e5f8bd9-4288-4f1b-9af5-aee6e9236011" containerName="ceilometer-central-agent" Dec 02 19:00:54 crc kubenswrapper[4792]: I1202 19:00:54.912480 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e5f8bd9-4288-4f1b-9af5-aee6e9236011" containerName="ceilometer-central-agent" Dec 02 19:00:54 crc kubenswrapper[4792]: E1202 19:00:54.912550 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e5f8bd9-4288-4f1b-9af5-aee6e9236011" containerName="ceilometer-notification-agent" Dec 02 19:00:54 crc kubenswrapper[4792]: I1202 19:00:54.912562 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e5f8bd9-4288-4f1b-9af5-aee6e9236011" containerName="ceilometer-notification-agent" Dec 02 19:00:54 crc kubenswrapper[4792]: E1202 19:00:54.912587 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e5f8bd9-4288-4f1b-9af5-aee6e9236011" containerName="sg-core" Dec 02 19:00:54 crc kubenswrapper[4792]: I1202 19:00:54.912597 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e5f8bd9-4288-4f1b-9af5-aee6e9236011" containerName="sg-core" Dec 02 19:00:54 crc kubenswrapper[4792]: E1202 19:00:54.912647 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e5f8bd9-4288-4f1b-9af5-aee6e9236011" containerName="proxy-httpd" Dec 02 19:00:54 crc kubenswrapper[4792]: I1202 19:00:54.912655 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e5f8bd9-4288-4f1b-9af5-aee6e9236011" containerName="proxy-httpd" Dec 02 19:00:54 crc kubenswrapper[4792]: I1202 19:00:54.912893 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e5f8bd9-4288-4f1b-9af5-aee6e9236011" containerName="ceilometer-central-agent" Dec 02 19:00:54 crc kubenswrapper[4792]: I1202 19:00:54.912928 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e5f8bd9-4288-4f1b-9af5-aee6e9236011" containerName="proxy-httpd" Dec 02 19:00:54 crc kubenswrapper[4792]: I1202 19:00:54.912942 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e5f8bd9-4288-4f1b-9af5-aee6e9236011" containerName="ceilometer-notification-agent" Dec 02 19:00:54 crc kubenswrapper[4792]: I1202 19:00:54.912960 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e5f8bd9-4288-4f1b-9af5-aee6e9236011" containerName="sg-core" Dec 02 19:00:54 crc kubenswrapper[4792]: I1202 19:00:54.917835 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 19:00:54 crc kubenswrapper[4792]: I1202 19:00:54.920807 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 02 19:00:54 crc kubenswrapper[4792]: I1202 19:00:54.921006 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 02 19:00:54 crc kubenswrapper[4792]: I1202 19:00:54.921189 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 02 19:00:54 crc kubenswrapper[4792]: I1202 19:00:54.924920 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 19:00:54 crc kubenswrapper[4792]: I1202 19:00:54.926712 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/625232de-fdb5-4851-92e2-3182287f2a64-log-httpd\") pod \"ceilometer-0\" (UID: \"625232de-fdb5-4851-92e2-3182287f2a64\") " pod="openstack/ceilometer-0" Dec 02 19:00:54 crc kubenswrapper[4792]: I1202 19:00:54.926828 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7fhn\" (UniqueName: \"kubernetes.io/projected/625232de-fdb5-4851-92e2-3182287f2a64-kube-api-access-w7fhn\") pod \"ceilometer-0\" (UID: \"625232de-fdb5-4851-92e2-3182287f2a64\") " pod="openstack/ceilometer-0" Dec 02 19:00:54 crc kubenswrapper[4792]: I1202 19:00:54.926868 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/625232de-fdb5-4851-92e2-3182287f2a64-config-data\") pod \"ceilometer-0\" (UID: \"625232de-fdb5-4851-92e2-3182287f2a64\") " pod="openstack/ceilometer-0" Dec 02 19:00:54 crc kubenswrapper[4792]: I1202 19:00:54.927046 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/625232de-fdb5-4851-92e2-3182287f2a64-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"625232de-fdb5-4851-92e2-3182287f2a64\") " pod="openstack/ceilometer-0" Dec 02 19:00:54 crc kubenswrapper[4792]: I1202 19:00:54.927169 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/625232de-fdb5-4851-92e2-3182287f2a64-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"625232de-fdb5-4851-92e2-3182287f2a64\") " pod="openstack/ceilometer-0" Dec 02 19:00:54 crc kubenswrapper[4792]: I1202 19:00:54.927248 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/625232de-fdb5-4851-92e2-3182287f2a64-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"625232de-fdb5-4851-92e2-3182287f2a64\") " pod="openstack/ceilometer-0" Dec 02 19:00:54 crc kubenswrapper[4792]: I1202 19:00:54.927314 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/625232de-fdb5-4851-92e2-3182287f2a64-scripts\") pod \"ceilometer-0\" (UID: \"625232de-fdb5-4851-92e2-3182287f2a64\") " pod="openstack/ceilometer-0" Dec 02 19:00:54 crc kubenswrapper[4792]: I1202 19:00:54.927350 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/625232de-fdb5-4851-92e2-3182287f2a64-run-httpd\") pod \"ceilometer-0\" (UID: \"625232de-fdb5-4851-92e2-3182287f2a64\") " pod="openstack/ceilometer-0" Dec 02 19:00:55 crc kubenswrapper[4792]: I1202 19:00:55.029638 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/625232de-fdb5-4851-92e2-3182287f2a64-log-httpd\") pod \"ceilometer-0\" (UID: \"625232de-fdb5-4851-92e2-3182287f2a64\") " pod="openstack/ceilometer-0" Dec 02 19:00:55 crc kubenswrapper[4792]: I1202 19:00:55.029686 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7fhn\" (UniqueName: \"kubernetes.io/projected/625232de-fdb5-4851-92e2-3182287f2a64-kube-api-access-w7fhn\") pod \"ceilometer-0\" (UID: \"625232de-fdb5-4851-92e2-3182287f2a64\") " pod="openstack/ceilometer-0" Dec 02 19:00:55 crc kubenswrapper[4792]: I1202 19:00:55.029711 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/625232de-fdb5-4851-92e2-3182287f2a64-config-data\") pod \"ceilometer-0\" (UID: \"625232de-fdb5-4851-92e2-3182287f2a64\") " pod="openstack/ceilometer-0" Dec 02 19:00:55 crc kubenswrapper[4792]: I1202 19:00:55.029752 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/625232de-fdb5-4851-92e2-3182287f2a64-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"625232de-fdb5-4851-92e2-3182287f2a64\") " pod="openstack/ceilometer-0" Dec 02 19:00:55 crc kubenswrapper[4792]: I1202 19:00:55.029787 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/625232de-fdb5-4851-92e2-3182287f2a64-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"625232de-fdb5-4851-92e2-3182287f2a64\") " pod="openstack/ceilometer-0" Dec 02 19:00:55 crc kubenswrapper[4792]: I1202 19:00:55.029828 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/625232de-fdb5-4851-92e2-3182287f2a64-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"625232de-fdb5-4851-92e2-3182287f2a64\") " pod="openstack/ceilometer-0" Dec 02 19:00:55 crc kubenswrapper[4792]: I1202 19:00:55.029868 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/625232de-fdb5-4851-92e2-3182287f2a64-scripts\") pod \"ceilometer-0\" (UID: \"625232de-fdb5-4851-92e2-3182287f2a64\") " pod="openstack/ceilometer-0" Dec 02 19:00:55 crc kubenswrapper[4792]: I1202 19:00:55.029886 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/625232de-fdb5-4851-92e2-3182287f2a64-run-httpd\") pod \"ceilometer-0\" (UID: \"625232de-fdb5-4851-92e2-3182287f2a64\") " pod="openstack/ceilometer-0" Dec 02 19:00:55 crc kubenswrapper[4792]: I1202 19:00:55.030388 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/625232de-fdb5-4851-92e2-3182287f2a64-log-httpd\") pod \"ceilometer-0\" (UID: \"625232de-fdb5-4851-92e2-3182287f2a64\") " pod="openstack/ceilometer-0" Dec 02 19:00:55 crc kubenswrapper[4792]: I1202 19:00:55.030418 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/625232de-fdb5-4851-92e2-3182287f2a64-run-httpd\") pod \"ceilometer-0\" (UID: \"625232de-fdb5-4851-92e2-3182287f2a64\") " pod="openstack/ceilometer-0" Dec 02 19:00:55 crc kubenswrapper[4792]: I1202 19:00:55.034975 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/625232de-fdb5-4851-92e2-3182287f2a64-config-data\") pod \"ceilometer-0\" (UID: \"625232de-fdb5-4851-92e2-3182287f2a64\") " pod="openstack/ceilometer-0" Dec 02 19:00:55 crc kubenswrapper[4792]: I1202 19:00:55.037473 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/625232de-fdb5-4851-92e2-3182287f2a64-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"625232de-fdb5-4851-92e2-3182287f2a64\") " pod="openstack/ceilometer-0" Dec 02 19:00:55 crc kubenswrapper[4792]: I1202 19:00:55.037918 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/625232de-fdb5-4851-92e2-3182287f2a64-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"625232de-fdb5-4851-92e2-3182287f2a64\") " pod="openstack/ceilometer-0" Dec 02 19:00:55 crc kubenswrapper[4792]: I1202 19:00:55.038346 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/625232de-fdb5-4851-92e2-3182287f2a64-scripts\") pod \"ceilometer-0\" (UID: \"625232de-fdb5-4851-92e2-3182287f2a64\") " pod="openstack/ceilometer-0" Dec 02 19:00:55 crc kubenswrapper[4792]: I1202 19:00:55.039143 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/625232de-fdb5-4851-92e2-3182287f2a64-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"625232de-fdb5-4851-92e2-3182287f2a64\") " pod="openstack/ceilometer-0" Dec 02 19:00:55 crc kubenswrapper[4792]: I1202 19:00:55.049395 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7fhn\" (UniqueName: \"kubernetes.io/projected/625232de-fdb5-4851-92e2-3182287f2a64-kube-api-access-w7fhn\") pod \"ceilometer-0\" (UID: \"625232de-fdb5-4851-92e2-3182287f2a64\") " pod="openstack/ceilometer-0" Dec 02 19:00:55 crc kubenswrapper[4792]: I1202 19:00:55.272202 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 19:00:55 crc kubenswrapper[4792]: I1202 19:00:55.561620 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e5f8bd9-4288-4f1b-9af5-aee6e9236011" path="/var/lib/kubelet/pods/3e5f8bd9-4288-4f1b-9af5-aee6e9236011/volumes" Dec 02 19:00:55 crc kubenswrapper[4792]: I1202 19:00:55.826674 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 19:00:55 crc kubenswrapper[4792]: W1202 19:00:55.829713 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod625232de_fdb5_4851_92e2_3182287f2a64.slice/crio-1ff268cccfbf71e18edfa0825553f0695ca5c3891c75b34a7b04c45c35dddf65 WatchSource:0}: Error finding container 1ff268cccfbf71e18edfa0825553f0695ca5c3891c75b34a7b04c45c35dddf65: Status 404 returned error can't find the container with id 1ff268cccfbf71e18edfa0825553f0695ca5c3891c75b34a7b04c45c35dddf65 Dec 02 19:00:56 crc kubenswrapper[4792]: I1202 19:00:56.825624 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 02 19:00:56 crc kubenswrapper[4792]: I1202 19:00:56.854866 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"625232de-fdb5-4851-92e2-3182287f2a64","Type":"ContainerStarted","Data":"a0ee2d76b6ecdb541890a8c72f66a46843ea5e1b2d0000a3846ff8a74dd30adc"} Dec 02 19:00:56 crc kubenswrapper[4792]: I1202 19:00:56.854919 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"625232de-fdb5-4851-92e2-3182287f2a64","Type":"ContainerStarted","Data":"1ff268cccfbf71e18edfa0825553f0695ca5c3891c75b34a7b04c45c35dddf65"} Dec 02 19:00:56 crc kubenswrapper[4792]: I1202 19:00:56.857386 4792 generic.go:334] "Generic (PLEG): container finished" podID="9c321714-c201-4d1e-a0f9-d2fe24983c28" containerID="fd4d3cd642d4547c4b053ff2b8c9a8a215451173821a6cd00f1a4de5b307e2ea" exitCode=137 Dec 02 19:00:56 crc kubenswrapper[4792]: I1202 19:00:56.857423 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"9c321714-c201-4d1e-a0f9-d2fe24983c28","Type":"ContainerDied","Data":"fd4d3cd642d4547c4b053ff2b8c9a8a215451173821a6cd00f1a4de5b307e2ea"} Dec 02 19:00:56 crc kubenswrapper[4792]: I1202 19:00:56.857447 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"9c321714-c201-4d1e-a0f9-d2fe24983c28","Type":"ContainerDied","Data":"1023b8a1ae65326d99e9812ffc1a5bc40e3d7206a451f4ec96f350dbf1fce119"} Dec 02 19:00:56 crc kubenswrapper[4792]: I1202 19:00:56.857463 4792 scope.go:117] "RemoveContainer" containerID="fd4d3cd642d4547c4b053ff2b8c9a8a215451173821a6cd00f1a4de5b307e2ea" Dec 02 19:00:56 crc kubenswrapper[4792]: I1202 19:00:56.857591 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 02 19:00:56 crc kubenswrapper[4792]: I1202 19:00:56.875694 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c321714-c201-4d1e-a0f9-d2fe24983c28-config-data\") pod \"9c321714-c201-4d1e-a0f9-d2fe24983c28\" (UID: \"9c321714-c201-4d1e-a0f9-d2fe24983c28\") " Dec 02 19:00:56 crc kubenswrapper[4792]: I1202 19:00:56.875832 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jvhvg\" (UniqueName: \"kubernetes.io/projected/9c321714-c201-4d1e-a0f9-d2fe24983c28-kube-api-access-jvhvg\") pod \"9c321714-c201-4d1e-a0f9-d2fe24983c28\" (UID: \"9c321714-c201-4d1e-a0f9-d2fe24983c28\") " Dec 02 19:00:56 crc kubenswrapper[4792]: I1202 19:00:56.875886 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c321714-c201-4d1e-a0f9-d2fe24983c28-combined-ca-bundle\") pod \"9c321714-c201-4d1e-a0f9-d2fe24983c28\" (UID: \"9c321714-c201-4d1e-a0f9-d2fe24983c28\") " Dec 02 19:00:56 crc kubenswrapper[4792]: I1202 19:00:56.881775 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c321714-c201-4d1e-a0f9-d2fe24983c28-kube-api-access-jvhvg" (OuterVolumeSpecName: "kube-api-access-jvhvg") pod "9c321714-c201-4d1e-a0f9-d2fe24983c28" (UID: "9c321714-c201-4d1e-a0f9-d2fe24983c28"). InnerVolumeSpecName "kube-api-access-jvhvg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:00:56 crc kubenswrapper[4792]: I1202 19:00:56.892160 4792 scope.go:117] "RemoveContainer" containerID="fd4d3cd642d4547c4b053ff2b8c9a8a215451173821a6cd00f1a4de5b307e2ea" Dec 02 19:00:56 crc kubenswrapper[4792]: E1202 19:00:56.892730 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fd4d3cd642d4547c4b053ff2b8c9a8a215451173821a6cd00f1a4de5b307e2ea\": container with ID starting with fd4d3cd642d4547c4b053ff2b8c9a8a215451173821a6cd00f1a4de5b307e2ea not found: ID does not exist" containerID="fd4d3cd642d4547c4b053ff2b8c9a8a215451173821a6cd00f1a4de5b307e2ea" Dec 02 19:00:56 crc kubenswrapper[4792]: I1202 19:00:56.892779 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fd4d3cd642d4547c4b053ff2b8c9a8a215451173821a6cd00f1a4de5b307e2ea"} err="failed to get container status \"fd4d3cd642d4547c4b053ff2b8c9a8a215451173821a6cd00f1a4de5b307e2ea\": rpc error: code = NotFound desc = could not find container \"fd4d3cd642d4547c4b053ff2b8c9a8a215451173821a6cd00f1a4de5b307e2ea\": container with ID starting with fd4d3cd642d4547c4b053ff2b8c9a8a215451173821a6cd00f1a4de5b307e2ea not found: ID does not exist" Dec 02 19:00:56 crc kubenswrapper[4792]: I1202 19:00:56.904799 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c321714-c201-4d1e-a0f9-d2fe24983c28-config-data" (OuterVolumeSpecName: "config-data") pod "9c321714-c201-4d1e-a0f9-d2fe24983c28" (UID: "9c321714-c201-4d1e-a0f9-d2fe24983c28"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:00:56 crc kubenswrapper[4792]: I1202 19:00:56.908280 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c321714-c201-4d1e-a0f9-d2fe24983c28-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9c321714-c201-4d1e-a0f9-d2fe24983c28" (UID: "9c321714-c201-4d1e-a0f9-d2fe24983c28"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:00:56 crc kubenswrapper[4792]: I1202 19:00:56.980026 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c321714-c201-4d1e-a0f9-d2fe24983c28-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:56 crc kubenswrapper[4792]: I1202 19:00:56.980058 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c321714-c201-4d1e-a0f9-d2fe24983c28-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:56 crc kubenswrapper[4792]: I1202 19:00:56.980067 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jvhvg\" (UniqueName: \"kubernetes.io/projected/9c321714-c201-4d1e-a0f9-d2fe24983c28-kube-api-access-jvhvg\") on node \"crc\" DevicePath \"\"" Dec 02 19:00:57 crc kubenswrapper[4792]: I1202 19:00:57.209645 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 02 19:00:57 crc kubenswrapper[4792]: I1202 19:00:57.232869 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 02 19:00:57 crc kubenswrapper[4792]: I1202 19:00:57.264352 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 02 19:00:57 crc kubenswrapper[4792]: E1202 19:00:57.264888 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c321714-c201-4d1e-a0f9-d2fe24983c28" containerName="nova-cell1-novncproxy-novncproxy" Dec 02 19:00:57 crc kubenswrapper[4792]: I1202 19:00:57.264909 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c321714-c201-4d1e-a0f9-d2fe24983c28" containerName="nova-cell1-novncproxy-novncproxy" Dec 02 19:00:57 crc kubenswrapper[4792]: I1202 19:00:57.265313 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c321714-c201-4d1e-a0f9-d2fe24983c28" containerName="nova-cell1-novncproxy-novncproxy" Dec 02 19:00:57 crc kubenswrapper[4792]: I1202 19:00:57.266574 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 02 19:00:57 crc kubenswrapper[4792]: I1202 19:00:57.269727 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Dec 02 19:00:57 crc kubenswrapper[4792]: I1202 19:00:57.269876 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Dec 02 19:00:57 crc kubenswrapper[4792]: I1202 19:00:57.270336 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 02 19:00:57 crc kubenswrapper[4792]: I1202 19:00:57.274360 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 02 19:00:57 crc kubenswrapper[4792]: I1202 19:00:57.285061 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/78a5099a-bc49-427d-b2c5-46adcda0e3e9-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"78a5099a-bc49-427d-b2c5-46adcda0e3e9\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 19:00:57 crc kubenswrapper[4792]: I1202 19:00:57.285119 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78a5099a-bc49-427d-b2c5-46adcda0e3e9-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"78a5099a-bc49-427d-b2c5-46adcda0e3e9\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 19:00:57 crc kubenswrapper[4792]: I1202 19:00:57.285160 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78a5099a-bc49-427d-b2c5-46adcda0e3e9-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"78a5099a-bc49-427d-b2c5-46adcda0e3e9\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 19:00:57 crc kubenswrapper[4792]: I1202 19:00:57.285211 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zcdqp\" (UniqueName: \"kubernetes.io/projected/78a5099a-bc49-427d-b2c5-46adcda0e3e9-kube-api-access-zcdqp\") pod \"nova-cell1-novncproxy-0\" (UID: \"78a5099a-bc49-427d-b2c5-46adcda0e3e9\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 19:00:57 crc kubenswrapper[4792]: I1202 19:00:57.285251 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/78a5099a-bc49-427d-b2c5-46adcda0e3e9-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"78a5099a-bc49-427d-b2c5-46adcda0e3e9\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 19:00:57 crc kubenswrapper[4792]: I1202 19:00:57.387248 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/78a5099a-bc49-427d-b2c5-46adcda0e3e9-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"78a5099a-bc49-427d-b2c5-46adcda0e3e9\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 19:00:57 crc kubenswrapper[4792]: I1202 19:00:57.387300 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78a5099a-bc49-427d-b2c5-46adcda0e3e9-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"78a5099a-bc49-427d-b2c5-46adcda0e3e9\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 
19:00:57 crc kubenswrapper[4792]: I1202 19:00:57.387342 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78a5099a-bc49-427d-b2c5-46adcda0e3e9-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"78a5099a-bc49-427d-b2c5-46adcda0e3e9\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 19:00:57 crc kubenswrapper[4792]: I1202 19:00:57.387399 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zcdqp\" (UniqueName: \"kubernetes.io/projected/78a5099a-bc49-427d-b2c5-46adcda0e3e9-kube-api-access-zcdqp\") pod \"nova-cell1-novncproxy-0\" (UID: \"78a5099a-bc49-427d-b2c5-46adcda0e3e9\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 19:00:57 crc kubenswrapper[4792]: I1202 19:00:57.387444 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/78a5099a-bc49-427d-b2c5-46adcda0e3e9-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"78a5099a-bc49-427d-b2c5-46adcda0e3e9\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 19:00:57 crc kubenswrapper[4792]: I1202 19:00:57.396313 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78a5099a-bc49-427d-b2c5-46adcda0e3e9-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"78a5099a-bc49-427d-b2c5-46adcda0e3e9\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 19:00:57 crc kubenswrapper[4792]: I1202 19:00:57.399111 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78a5099a-bc49-427d-b2c5-46adcda0e3e9-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"78a5099a-bc49-427d-b2c5-46adcda0e3e9\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 19:00:57 crc kubenswrapper[4792]: I1202 19:00:57.399867 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/78a5099a-bc49-427d-b2c5-46adcda0e3e9-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"78a5099a-bc49-427d-b2c5-46adcda0e3e9\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 19:00:57 crc kubenswrapper[4792]: I1202 19:00:57.402037 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/78a5099a-bc49-427d-b2c5-46adcda0e3e9-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"78a5099a-bc49-427d-b2c5-46adcda0e3e9\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 19:00:57 crc kubenswrapper[4792]: I1202 19:00:57.425995 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zcdqp\" (UniqueName: \"kubernetes.io/projected/78a5099a-bc49-427d-b2c5-46adcda0e3e9-kube-api-access-zcdqp\") pod \"nova-cell1-novncproxy-0\" (UID: \"78a5099a-bc49-427d-b2c5-46adcda0e3e9\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 19:00:57 crc kubenswrapper[4792]: I1202 19:00:57.551709 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c321714-c201-4d1e-a0f9-d2fe24983c28" path="/var/lib/kubelet/pods/9c321714-c201-4d1e-a0f9-d2fe24983c28/volumes" Dec 02 19:00:57 crc kubenswrapper[4792]: I1202 19:00:57.584393 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 02 19:00:57 crc kubenswrapper[4792]: I1202 19:00:57.868091 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"625232de-fdb5-4851-92e2-3182287f2a64","Type":"ContainerStarted","Data":"472be3090099e2f2ba402772a2c51ecd9792486cedc708413e96b8bc66b31e19"} Dec 02 19:00:58 crc kubenswrapper[4792]: I1202 19:00:58.039115 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 02 19:00:58 crc kubenswrapper[4792]: W1202 19:00:58.041610 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod78a5099a_bc49_427d_b2c5_46adcda0e3e9.slice/crio-9ae51e8ffac5047f1585712bdaa36c90f5f378e872d53b895663b1fd1a237613 WatchSource:0}: Error finding container 9ae51e8ffac5047f1585712bdaa36c90f5f378e872d53b895663b1fd1a237613: Status 404 returned error can't find the container with id 9ae51e8ffac5047f1585712bdaa36c90f5f378e872d53b895663b1fd1a237613 Dec 02 19:00:58 crc kubenswrapper[4792]: I1202 19:00:58.113134 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 02 19:00:58 crc kubenswrapper[4792]: I1202 19:00:58.881984 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"78a5099a-bc49-427d-b2c5-46adcda0e3e9","Type":"ContainerStarted","Data":"5d7a0a74816a24c39333f8bfaadfdd2b4f73ab7e155be848393d693192d58133"} Dec 02 19:00:58 crc kubenswrapper[4792]: I1202 19:00:58.882037 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"78a5099a-bc49-427d-b2c5-46adcda0e3e9","Type":"ContainerStarted","Data":"9ae51e8ffac5047f1585712bdaa36c90f5f378e872d53b895663b1fd1a237613"} Dec 02 19:00:58 crc kubenswrapper[4792]: I1202 19:00:58.884321 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"625232de-fdb5-4851-92e2-3182287f2a64","Type":"ContainerStarted","Data":"9c7c897131852cb7c188fca2e77d6f1e31fb2090a0c5cf1298c4f016b5bb91d6"} Dec 02 19:00:58 crc kubenswrapper[4792]: I1202 19:00:58.901028 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=1.9010092520000001 podStartE2EDuration="1.901009252s" podCreationTimestamp="2025-12-02 19:00:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 19:00:58.897244464 +0000 UTC m=+1489.670136792" watchObservedRunningTime="2025-12-02 19:00:58.901009252 +0000 UTC m=+1489.673901600" Dec 02 19:00:59 crc kubenswrapper[4792]: I1202 19:00:59.350284 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 02 19:00:59 crc kubenswrapper[4792]: I1202 19:00:59.351187 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 02 19:00:59 crc kubenswrapper[4792]: I1202 19:00:59.352846 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 02 19:00:59 crc kubenswrapper[4792]: I1202 19:00:59.355410 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 02 19:00:59 crc kubenswrapper[4792]: I1202 19:00:59.894689 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 02 19:00:59 
crc kubenswrapper[4792]: I1202 19:00:59.899592 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 02 19:01:00 crc kubenswrapper[4792]: I1202 19:01:00.098055 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5fd9b586ff-w5ctg"] Dec 02 19:01:00 crc kubenswrapper[4792]: I1202 19:01:00.102159 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5fd9b586ff-w5ctg" Dec 02 19:01:00 crc kubenswrapper[4792]: I1202 19:01:00.132707 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5fd9b586ff-w5ctg"] Dec 02 19:01:00 crc kubenswrapper[4792]: I1202 19:01:00.151637 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29411701-8bqdq"] Dec 02 19:01:00 crc kubenswrapper[4792]: I1202 19:01:00.153300 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29411701-8bqdq" Dec 02 19:01:00 crc kubenswrapper[4792]: I1202 19:01:00.158673 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tzk2j\" (UniqueName: \"kubernetes.io/projected/9b00586f-ffa2-4617-abe4-56758a897416-kube-api-access-tzk2j\") pod \"dnsmasq-dns-5fd9b586ff-w5ctg\" (UID: \"9b00586f-ffa2-4617-abe4-56758a897416\") " pod="openstack/dnsmasq-dns-5fd9b586ff-w5ctg" Dec 02 19:01:00 crc kubenswrapper[4792]: I1202 19:01:00.158717 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9b00586f-ffa2-4617-abe4-56758a897416-config\") pod \"dnsmasq-dns-5fd9b586ff-w5ctg\" (UID: \"9b00586f-ffa2-4617-abe4-56758a897416\") " pod="openstack/dnsmasq-dns-5fd9b586ff-w5ctg" Dec 02 19:01:00 crc kubenswrapper[4792]: I1202 19:01:00.158887 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9b00586f-ffa2-4617-abe4-56758a897416-ovsdbserver-sb\") pod \"dnsmasq-dns-5fd9b586ff-w5ctg\" (UID: \"9b00586f-ffa2-4617-abe4-56758a897416\") " pod="openstack/dnsmasq-dns-5fd9b586ff-w5ctg" Dec 02 19:01:00 crc kubenswrapper[4792]: I1202 19:01:00.159146 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9b00586f-ffa2-4617-abe4-56758a897416-dns-svc\") pod \"dnsmasq-dns-5fd9b586ff-w5ctg\" (UID: \"9b00586f-ffa2-4617-abe4-56758a897416\") " pod="openstack/dnsmasq-dns-5fd9b586ff-w5ctg" Dec 02 19:01:00 crc kubenswrapper[4792]: I1202 19:01:00.159171 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9b00586f-ffa2-4617-abe4-56758a897416-dns-swift-storage-0\") pod \"dnsmasq-dns-5fd9b586ff-w5ctg\" (UID: \"9b00586f-ffa2-4617-abe4-56758a897416\") " pod="openstack/dnsmasq-dns-5fd9b586ff-w5ctg" Dec 02 19:01:00 crc kubenswrapper[4792]: I1202 19:01:00.159372 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9b00586f-ffa2-4617-abe4-56758a897416-ovsdbserver-nb\") pod \"dnsmasq-dns-5fd9b586ff-w5ctg\" (UID: \"9b00586f-ffa2-4617-abe4-56758a897416\") " pod="openstack/dnsmasq-dns-5fd9b586ff-w5ctg" Dec 02 19:01:00 crc kubenswrapper[4792]: I1202 19:01:00.196976 4792 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openstack/keystone-cron-29411701-8bqdq"] Dec 02 19:01:00 crc kubenswrapper[4792]: I1202 19:01:00.260892 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9b00586f-ffa2-4617-abe4-56758a897416-dns-swift-storage-0\") pod \"dnsmasq-dns-5fd9b586ff-w5ctg\" (UID: \"9b00586f-ffa2-4617-abe4-56758a897416\") " pod="openstack/dnsmasq-dns-5fd9b586ff-w5ctg" Dec 02 19:01:00 crc kubenswrapper[4792]: I1202 19:01:00.260931 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9b00586f-ffa2-4617-abe4-56758a897416-dns-svc\") pod \"dnsmasq-dns-5fd9b586ff-w5ctg\" (UID: \"9b00586f-ffa2-4617-abe4-56758a897416\") " pod="openstack/dnsmasq-dns-5fd9b586ff-w5ctg" Dec 02 19:01:00 crc kubenswrapper[4792]: I1202 19:01:00.261002 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9b00586f-ffa2-4617-abe4-56758a897416-ovsdbserver-nb\") pod \"dnsmasq-dns-5fd9b586ff-w5ctg\" (UID: \"9b00586f-ffa2-4617-abe4-56758a897416\") " pod="openstack/dnsmasq-dns-5fd9b586ff-w5ctg" Dec 02 19:01:00 crc kubenswrapper[4792]: I1202 19:01:00.261029 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7ba4a311-3a7b-4575-b183-12bb721e71a1-fernet-keys\") pod \"keystone-cron-29411701-8bqdq\" (UID: \"7ba4a311-3a7b-4575-b183-12bb721e71a1\") " pod="openstack/keystone-cron-29411701-8bqdq" Dec 02 19:01:00 crc kubenswrapper[4792]: I1202 19:01:00.261047 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tzk2j\" (UniqueName: \"kubernetes.io/projected/9b00586f-ffa2-4617-abe4-56758a897416-kube-api-access-tzk2j\") pod \"dnsmasq-dns-5fd9b586ff-w5ctg\" (UID: \"9b00586f-ffa2-4617-abe4-56758a897416\") " pod="openstack/dnsmasq-dns-5fd9b586ff-w5ctg" Dec 02 19:01:00 crc kubenswrapper[4792]: I1202 19:01:00.261067 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9b00586f-ffa2-4617-abe4-56758a897416-config\") pod \"dnsmasq-dns-5fd9b586ff-w5ctg\" (UID: \"9b00586f-ffa2-4617-abe4-56758a897416\") " pod="openstack/dnsmasq-dns-5fd9b586ff-w5ctg" Dec 02 19:01:00 crc kubenswrapper[4792]: I1202 19:01:00.261100 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ba4a311-3a7b-4575-b183-12bb721e71a1-config-data\") pod \"keystone-cron-29411701-8bqdq\" (UID: \"7ba4a311-3a7b-4575-b183-12bb721e71a1\") " pod="openstack/keystone-cron-29411701-8bqdq" Dec 02 19:01:00 crc kubenswrapper[4792]: I1202 19:01:00.261119 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ba4a311-3a7b-4575-b183-12bb721e71a1-combined-ca-bundle\") pod \"keystone-cron-29411701-8bqdq\" (UID: \"7ba4a311-3a7b-4575-b183-12bb721e71a1\") " pod="openstack/keystone-cron-29411701-8bqdq" Dec 02 19:01:00 crc kubenswrapper[4792]: I1202 19:01:00.261143 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wqhpg\" (UniqueName: \"kubernetes.io/projected/7ba4a311-3a7b-4575-b183-12bb721e71a1-kube-api-access-wqhpg\") pod \"keystone-cron-29411701-8bqdq\" (UID: 
\"7ba4a311-3a7b-4575-b183-12bb721e71a1\") " pod="openstack/keystone-cron-29411701-8bqdq" Dec 02 19:01:00 crc kubenswrapper[4792]: I1202 19:01:00.261168 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9b00586f-ffa2-4617-abe4-56758a897416-ovsdbserver-sb\") pod \"dnsmasq-dns-5fd9b586ff-w5ctg\" (UID: \"9b00586f-ffa2-4617-abe4-56758a897416\") " pod="openstack/dnsmasq-dns-5fd9b586ff-w5ctg" Dec 02 19:01:00 crc kubenswrapper[4792]: I1202 19:01:00.263030 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9b00586f-ffa2-4617-abe4-56758a897416-ovsdbserver-nb\") pod \"dnsmasq-dns-5fd9b586ff-w5ctg\" (UID: \"9b00586f-ffa2-4617-abe4-56758a897416\") " pod="openstack/dnsmasq-dns-5fd9b586ff-w5ctg" Dec 02 19:01:00 crc kubenswrapper[4792]: I1202 19:01:00.263218 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9b00586f-ffa2-4617-abe4-56758a897416-dns-swift-storage-0\") pod \"dnsmasq-dns-5fd9b586ff-w5ctg\" (UID: \"9b00586f-ffa2-4617-abe4-56758a897416\") " pod="openstack/dnsmasq-dns-5fd9b586ff-w5ctg" Dec 02 19:01:00 crc kubenswrapper[4792]: I1202 19:01:00.265992 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9b00586f-ffa2-4617-abe4-56758a897416-ovsdbserver-sb\") pod \"dnsmasq-dns-5fd9b586ff-w5ctg\" (UID: \"9b00586f-ffa2-4617-abe4-56758a897416\") " pod="openstack/dnsmasq-dns-5fd9b586ff-w5ctg" Dec 02 19:01:00 crc kubenswrapper[4792]: I1202 19:01:00.266054 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9b00586f-ffa2-4617-abe4-56758a897416-dns-svc\") pod \"dnsmasq-dns-5fd9b586ff-w5ctg\" (UID: \"9b00586f-ffa2-4617-abe4-56758a897416\") " pod="openstack/dnsmasq-dns-5fd9b586ff-w5ctg" Dec 02 19:01:00 crc kubenswrapper[4792]: I1202 19:01:00.269870 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9b00586f-ffa2-4617-abe4-56758a897416-config\") pod \"dnsmasq-dns-5fd9b586ff-w5ctg\" (UID: \"9b00586f-ffa2-4617-abe4-56758a897416\") " pod="openstack/dnsmasq-dns-5fd9b586ff-w5ctg" Dec 02 19:01:00 crc kubenswrapper[4792]: I1202 19:01:00.278577 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tzk2j\" (UniqueName: \"kubernetes.io/projected/9b00586f-ffa2-4617-abe4-56758a897416-kube-api-access-tzk2j\") pod \"dnsmasq-dns-5fd9b586ff-w5ctg\" (UID: \"9b00586f-ffa2-4617-abe4-56758a897416\") " pod="openstack/dnsmasq-dns-5fd9b586ff-w5ctg" Dec 02 19:01:00 crc kubenswrapper[4792]: I1202 19:01:00.362673 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7ba4a311-3a7b-4575-b183-12bb721e71a1-fernet-keys\") pod \"keystone-cron-29411701-8bqdq\" (UID: \"7ba4a311-3a7b-4575-b183-12bb721e71a1\") " pod="openstack/keystone-cron-29411701-8bqdq" Dec 02 19:01:00 crc kubenswrapper[4792]: I1202 19:01:00.362765 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ba4a311-3a7b-4575-b183-12bb721e71a1-config-data\") pod \"keystone-cron-29411701-8bqdq\" (UID: \"7ba4a311-3a7b-4575-b183-12bb721e71a1\") " pod="openstack/keystone-cron-29411701-8bqdq" Dec 02 19:01:00 crc 
kubenswrapper[4792]: I1202 19:01:00.362797 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ba4a311-3a7b-4575-b183-12bb721e71a1-combined-ca-bundle\") pod \"keystone-cron-29411701-8bqdq\" (UID: \"7ba4a311-3a7b-4575-b183-12bb721e71a1\") " pod="openstack/keystone-cron-29411701-8bqdq" Dec 02 19:01:00 crc kubenswrapper[4792]: I1202 19:01:00.362826 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wqhpg\" (UniqueName: \"kubernetes.io/projected/7ba4a311-3a7b-4575-b183-12bb721e71a1-kube-api-access-wqhpg\") pod \"keystone-cron-29411701-8bqdq\" (UID: \"7ba4a311-3a7b-4575-b183-12bb721e71a1\") " pod="openstack/keystone-cron-29411701-8bqdq" Dec 02 19:01:00 crc kubenswrapper[4792]: I1202 19:01:00.367645 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ba4a311-3a7b-4575-b183-12bb721e71a1-config-data\") pod \"keystone-cron-29411701-8bqdq\" (UID: \"7ba4a311-3a7b-4575-b183-12bb721e71a1\") " pod="openstack/keystone-cron-29411701-8bqdq" Dec 02 19:01:00 crc kubenswrapper[4792]: I1202 19:01:00.368101 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7ba4a311-3a7b-4575-b183-12bb721e71a1-fernet-keys\") pod \"keystone-cron-29411701-8bqdq\" (UID: \"7ba4a311-3a7b-4575-b183-12bb721e71a1\") " pod="openstack/keystone-cron-29411701-8bqdq" Dec 02 19:01:00 crc kubenswrapper[4792]: I1202 19:01:00.368491 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ba4a311-3a7b-4575-b183-12bb721e71a1-combined-ca-bundle\") pod \"keystone-cron-29411701-8bqdq\" (UID: \"7ba4a311-3a7b-4575-b183-12bb721e71a1\") " pod="openstack/keystone-cron-29411701-8bqdq" Dec 02 19:01:00 crc kubenswrapper[4792]: I1202 19:01:00.381018 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wqhpg\" (UniqueName: \"kubernetes.io/projected/7ba4a311-3a7b-4575-b183-12bb721e71a1-kube-api-access-wqhpg\") pod \"keystone-cron-29411701-8bqdq\" (UID: \"7ba4a311-3a7b-4575-b183-12bb721e71a1\") " pod="openstack/keystone-cron-29411701-8bqdq" Dec 02 19:01:00 crc kubenswrapper[4792]: I1202 19:01:00.436091 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5fd9b586ff-w5ctg" Dec 02 19:01:00 crc kubenswrapper[4792]: I1202 19:01:00.483122 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29411701-8bqdq" Dec 02 19:01:00 crc kubenswrapper[4792]: I1202 19:01:00.912625 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"625232de-fdb5-4851-92e2-3182287f2a64","Type":"ContainerStarted","Data":"181de506876ddb9f887d28700c019861ff4094fbd24c90e9ff178ac0635d9586"} Dec 02 19:01:00 crc kubenswrapper[4792]: I1202 19:01:00.912928 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 02 19:01:00 crc kubenswrapper[4792]: I1202 19:01:00.943438 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.938382219 podStartE2EDuration="6.943417978s" podCreationTimestamp="2025-12-02 19:00:54 +0000 UTC" firstStartedPulling="2025-12-02 19:00:55.832926368 +0000 UTC m=+1486.605818696" lastFinishedPulling="2025-12-02 19:00:59.837962127 +0000 UTC m=+1490.610854455" observedRunningTime="2025-12-02 19:01:00.935735038 +0000 UTC m=+1491.708627366" watchObservedRunningTime="2025-12-02 19:01:00.943417978 +0000 UTC m=+1491.716310296" Dec 02 19:01:00 crc kubenswrapper[4792]: I1202 19:01:00.987057 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5fd9b586ff-w5ctg"] Dec 02 19:01:00 crc kubenswrapper[4792]: W1202 19:01:00.988886 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9b00586f_ffa2_4617_abe4_56758a897416.slice/crio-730cef0ff54a43e4e61bde2ddaac6da0e4af57635d63b5b2b70f1cf2fcc31646 WatchSource:0}: Error finding container 730cef0ff54a43e4e61bde2ddaac6da0e4af57635d63b5b2b70f1cf2fcc31646: Status 404 returned error can't find the container with id 730cef0ff54a43e4e61bde2ddaac6da0e4af57635d63b5b2b70f1cf2fcc31646 Dec 02 19:01:01 crc kubenswrapper[4792]: I1202 19:01:01.078271 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29411701-8bqdq"] Dec 02 19:01:01 crc kubenswrapper[4792]: W1202 19:01:01.092660 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7ba4a311_3a7b_4575_b183_12bb721e71a1.slice/crio-5688a51ab97ef2012440f48d4a11621642ca7e62708c0d00114bd1439e7fdd61 WatchSource:0}: Error finding container 5688a51ab97ef2012440f48d4a11621642ca7e62708c0d00114bd1439e7fdd61: Status 404 returned error can't find the container with id 5688a51ab97ef2012440f48d4a11621642ca7e62708c0d00114bd1439e7fdd61 Dec 02 19:01:01 crc kubenswrapper[4792]: I1202 19:01:01.968040 4792 generic.go:334] "Generic (PLEG): container finished" podID="9b00586f-ffa2-4617-abe4-56758a897416" containerID="6306566cd0ac8b02100c06e10f85509efaa7fa4cb493fecb2da843b53293628a" exitCode=0 Dec 02 19:01:01 crc kubenswrapper[4792]: I1202 19:01:01.968162 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5fd9b586ff-w5ctg" event={"ID":"9b00586f-ffa2-4617-abe4-56758a897416","Type":"ContainerDied","Data":"6306566cd0ac8b02100c06e10f85509efaa7fa4cb493fecb2da843b53293628a"} Dec 02 19:01:01 crc kubenswrapper[4792]: I1202 19:01:01.968577 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5fd9b586ff-w5ctg" event={"ID":"9b00586f-ffa2-4617-abe4-56758a897416","Type":"ContainerStarted","Data":"730cef0ff54a43e4e61bde2ddaac6da0e4af57635d63b5b2b70f1cf2fcc31646"} Dec 02 19:01:01 crc kubenswrapper[4792]: I1202 19:01:01.972079 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/keystone-cron-29411701-8bqdq" event={"ID":"7ba4a311-3a7b-4575-b183-12bb721e71a1","Type":"ContainerStarted","Data":"ce65e12b20c798b7e0d3579db90f1947c29451147f31ff7443dba18b25644cac"} Dec 02 19:01:01 crc kubenswrapper[4792]: I1202 19:01:01.972115 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29411701-8bqdq" event={"ID":"7ba4a311-3a7b-4575-b183-12bb721e71a1","Type":"ContainerStarted","Data":"5688a51ab97ef2012440f48d4a11621642ca7e62708c0d00114bd1439e7fdd61"} Dec 02 19:01:02 crc kubenswrapper[4792]: I1202 19:01:02.014672 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29411701-8bqdq" podStartSLOduration=2.014652118 podStartE2EDuration="2.014652118s" podCreationTimestamp="2025-12-02 19:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 19:01:02.000856669 +0000 UTC m=+1492.773749007" watchObservedRunningTime="2025-12-02 19:01:02.014652118 +0000 UTC m=+1492.787544446" Dec 02 19:01:02 crc kubenswrapper[4792]: I1202 19:01:02.584654 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 02 19:01:02 crc kubenswrapper[4792]: I1202 19:01:02.730045 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 02 19:01:02 crc kubenswrapper[4792]: I1202 19:01:02.981908 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5fd9b586ff-w5ctg" event={"ID":"9b00586f-ffa2-4617-abe4-56758a897416","Type":"ContainerStarted","Data":"e0a4da9dd237407a1220dccdffad369d7cc795d3366e0ddf10644b20776095fa"} Dec 02 19:01:02 crc kubenswrapper[4792]: I1202 19:01:02.982068 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="794f62c5-7842-4616-aa8e-c968e23bc67d" containerName="nova-api-log" containerID="cri-o://f1d8db753027933f929de26c22a72e4fbd1e779d9a3517053268b5d9ff2b4c45" gracePeriod=30 Dec 02 19:01:02 crc kubenswrapper[4792]: I1202 19:01:02.982103 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="794f62c5-7842-4616-aa8e-c968e23bc67d" containerName="nova-api-api" containerID="cri-o://8c0557f540807fc38260cb813f4ce2cdda3c8403de6ac4671676bb6bb72c69b3" gracePeriod=30 Dec 02 19:01:03 crc kubenswrapper[4792]: I1202 19:01:03.626317 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5fd9b586ff-w5ctg" podStartSLOduration=3.62630015 podStartE2EDuration="3.62630015s" podCreationTimestamp="2025-12-02 19:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 19:01:03.005720342 +0000 UTC m=+1493.778612670" watchObservedRunningTime="2025-12-02 19:01:03.62630015 +0000 UTC m=+1494.399192478" Dec 02 19:01:03 crc kubenswrapper[4792]: I1202 19:01:03.634838 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 19:01:03 crc kubenswrapper[4792]: I1202 19:01:03.993766 4792 generic.go:334] "Generic (PLEG): container finished" podID="794f62c5-7842-4616-aa8e-c968e23bc67d" containerID="f1d8db753027933f929de26c22a72e4fbd1e779d9a3517053268b5d9ff2b4c45" exitCode=143 Dec 02 19:01:03 crc kubenswrapper[4792]: I1202 19:01:03.993973 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"794f62c5-7842-4616-aa8e-c968e23bc67d","Type":"ContainerDied","Data":"f1d8db753027933f929de26c22a72e4fbd1e779d9a3517053268b5d9ff2b4c45"} Dec 02 19:01:03 crc kubenswrapper[4792]: I1202 19:01:03.996375 4792 generic.go:334] "Generic (PLEG): container finished" podID="7ba4a311-3a7b-4575-b183-12bb721e71a1" containerID="ce65e12b20c798b7e0d3579db90f1947c29451147f31ff7443dba18b25644cac" exitCode=0 Dec 02 19:01:03 crc kubenswrapper[4792]: I1202 19:01:03.996455 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29411701-8bqdq" event={"ID":"7ba4a311-3a7b-4575-b183-12bb721e71a1","Type":"ContainerDied","Data":"ce65e12b20c798b7e0d3579db90f1947c29451147f31ff7443dba18b25644cac"} Dec 02 19:01:03 crc kubenswrapper[4792]: I1202 19:01:03.996720 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5fd9b586ff-w5ctg" Dec 02 19:01:03 crc kubenswrapper[4792]: I1202 19:01:03.996945 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="625232de-fdb5-4851-92e2-3182287f2a64" containerName="ceilometer-central-agent" containerID="cri-o://a0ee2d76b6ecdb541890a8c72f66a46843ea5e1b2d0000a3846ff8a74dd30adc" gracePeriod=30 Dec 02 19:01:03 crc kubenswrapper[4792]: I1202 19:01:03.996968 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="625232de-fdb5-4851-92e2-3182287f2a64" containerName="sg-core" containerID="cri-o://9c7c897131852cb7c188fca2e77d6f1e31fb2090a0c5cf1298c4f016b5bb91d6" gracePeriod=30 Dec 02 19:01:03 crc kubenswrapper[4792]: I1202 19:01:03.996983 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="625232de-fdb5-4851-92e2-3182287f2a64" containerName="proxy-httpd" containerID="cri-o://181de506876ddb9f887d28700c019861ff4094fbd24c90e9ff178ac0635d9586" gracePeriod=30 Dec 02 19:01:03 crc kubenswrapper[4792]: I1202 19:01:03.997099 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="625232de-fdb5-4851-92e2-3182287f2a64" containerName="ceilometer-notification-agent" containerID="cri-o://472be3090099e2f2ba402772a2c51ecd9792486cedc708413e96b8bc66b31e19" gracePeriod=30 Dec 02 19:01:05 crc kubenswrapper[4792]: I1202 19:01:05.013330 4792 generic.go:334] "Generic (PLEG): container finished" podID="625232de-fdb5-4851-92e2-3182287f2a64" containerID="181de506876ddb9f887d28700c019861ff4094fbd24c90e9ff178ac0635d9586" exitCode=0 Dec 02 19:01:05 crc kubenswrapper[4792]: I1202 19:01:05.013725 4792 generic.go:334] "Generic (PLEG): container finished" podID="625232de-fdb5-4851-92e2-3182287f2a64" containerID="9c7c897131852cb7c188fca2e77d6f1e31fb2090a0c5cf1298c4f016b5bb91d6" exitCode=2 Dec 02 19:01:05 crc kubenswrapper[4792]: I1202 19:01:05.013744 4792 generic.go:334] "Generic (PLEG): container finished" podID="625232de-fdb5-4851-92e2-3182287f2a64" containerID="472be3090099e2f2ba402772a2c51ecd9792486cedc708413e96b8bc66b31e19" exitCode=0 Dec 02 19:01:05 crc kubenswrapper[4792]: I1202 19:01:05.013764 4792 generic.go:334] "Generic (PLEG): container finished" podID="625232de-fdb5-4851-92e2-3182287f2a64" containerID="a0ee2d76b6ecdb541890a8c72f66a46843ea5e1b2d0000a3846ff8a74dd30adc" exitCode=0 Dec 02 19:01:05 crc kubenswrapper[4792]: I1202 19:01:05.013428 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"625232de-fdb5-4851-92e2-3182287f2a64","Type":"ContainerDied","Data":"181de506876ddb9f887d28700c019861ff4094fbd24c90e9ff178ac0635d9586"} Dec 02 19:01:05 crc kubenswrapper[4792]: I1202 19:01:05.013865 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"625232de-fdb5-4851-92e2-3182287f2a64","Type":"ContainerDied","Data":"9c7c897131852cb7c188fca2e77d6f1e31fb2090a0c5cf1298c4f016b5bb91d6"} Dec 02 19:01:05 crc kubenswrapper[4792]: I1202 19:01:05.013892 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"625232de-fdb5-4851-92e2-3182287f2a64","Type":"ContainerDied","Data":"472be3090099e2f2ba402772a2c51ecd9792486cedc708413e96b8bc66b31e19"} Dec 02 19:01:05 crc kubenswrapper[4792]: I1202 19:01:05.013918 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"625232de-fdb5-4851-92e2-3182287f2a64","Type":"ContainerDied","Data":"a0ee2d76b6ecdb541890a8c72f66a46843ea5e1b2d0000a3846ff8a74dd30adc"} Dec 02 19:01:05 crc kubenswrapper[4792]: I1202 19:01:05.677101 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29411701-8bqdq" Dec 02 19:01:05 crc kubenswrapper[4792]: I1202 19:01:05.685964 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 19:01:05 crc kubenswrapper[4792]: I1202 19:01:05.787946 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/625232de-fdb5-4851-92e2-3182287f2a64-combined-ca-bundle\") pod \"625232de-fdb5-4851-92e2-3182287f2a64\" (UID: \"625232de-fdb5-4851-92e2-3182287f2a64\") " Dec 02 19:01:05 crc kubenswrapper[4792]: I1202 19:01:05.788029 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/625232de-fdb5-4851-92e2-3182287f2a64-log-httpd\") pod \"625232de-fdb5-4851-92e2-3182287f2a64\" (UID: \"625232de-fdb5-4851-92e2-3182287f2a64\") " Dec 02 19:01:05 crc kubenswrapper[4792]: I1202 19:01:05.788081 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wqhpg\" (UniqueName: \"kubernetes.io/projected/7ba4a311-3a7b-4575-b183-12bb721e71a1-kube-api-access-wqhpg\") pod \"7ba4a311-3a7b-4575-b183-12bb721e71a1\" (UID: \"7ba4a311-3a7b-4575-b183-12bb721e71a1\") " Dec 02 19:01:05 crc kubenswrapper[4792]: I1202 19:01:05.788158 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ba4a311-3a7b-4575-b183-12bb721e71a1-config-data\") pod \"7ba4a311-3a7b-4575-b183-12bb721e71a1\" (UID: \"7ba4a311-3a7b-4575-b183-12bb721e71a1\") " Dec 02 19:01:05 crc kubenswrapper[4792]: I1202 19:01:05.788210 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ba4a311-3a7b-4575-b183-12bb721e71a1-combined-ca-bundle\") pod \"7ba4a311-3a7b-4575-b183-12bb721e71a1\" (UID: \"7ba4a311-3a7b-4575-b183-12bb721e71a1\") " Dec 02 19:01:05 crc kubenswrapper[4792]: I1202 19:01:05.788264 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/625232de-fdb5-4851-92e2-3182287f2a64-config-data\") pod \"625232de-fdb5-4851-92e2-3182287f2a64\" (UID: \"625232de-fdb5-4851-92e2-3182287f2a64\") " Dec 02 19:01:05 crc 
kubenswrapper[4792]: I1202 19:01:05.788369 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/625232de-fdb5-4851-92e2-3182287f2a64-ceilometer-tls-certs\") pod \"625232de-fdb5-4851-92e2-3182287f2a64\" (UID: \"625232de-fdb5-4851-92e2-3182287f2a64\") " Dec 02 19:01:05 crc kubenswrapper[4792]: I1202 19:01:05.788438 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7fhn\" (UniqueName: \"kubernetes.io/projected/625232de-fdb5-4851-92e2-3182287f2a64-kube-api-access-w7fhn\") pod \"625232de-fdb5-4851-92e2-3182287f2a64\" (UID: \"625232de-fdb5-4851-92e2-3182287f2a64\") " Dec 02 19:01:05 crc kubenswrapper[4792]: I1202 19:01:05.788504 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/625232de-fdb5-4851-92e2-3182287f2a64-sg-core-conf-yaml\") pod \"625232de-fdb5-4851-92e2-3182287f2a64\" (UID: \"625232de-fdb5-4851-92e2-3182287f2a64\") " Dec 02 19:01:05 crc kubenswrapper[4792]: I1202 19:01:05.788562 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7ba4a311-3a7b-4575-b183-12bb721e71a1-fernet-keys\") pod \"7ba4a311-3a7b-4575-b183-12bb721e71a1\" (UID: \"7ba4a311-3a7b-4575-b183-12bb721e71a1\") " Dec 02 19:01:05 crc kubenswrapper[4792]: I1202 19:01:05.788581 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/625232de-fdb5-4851-92e2-3182287f2a64-scripts\") pod \"625232de-fdb5-4851-92e2-3182287f2a64\" (UID: \"625232de-fdb5-4851-92e2-3182287f2a64\") " Dec 02 19:01:05 crc kubenswrapper[4792]: I1202 19:01:05.788603 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/625232de-fdb5-4851-92e2-3182287f2a64-run-httpd\") pod \"625232de-fdb5-4851-92e2-3182287f2a64\" (UID: \"625232de-fdb5-4851-92e2-3182287f2a64\") " Dec 02 19:01:05 crc kubenswrapper[4792]: I1202 19:01:05.789438 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/625232de-fdb5-4851-92e2-3182287f2a64-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "625232de-fdb5-4851-92e2-3182287f2a64" (UID: "625232de-fdb5-4851-92e2-3182287f2a64"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:01:05 crc kubenswrapper[4792]: I1202 19:01:05.790940 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/625232de-fdb5-4851-92e2-3182287f2a64-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "625232de-fdb5-4851-92e2-3182287f2a64" (UID: "625232de-fdb5-4851-92e2-3182287f2a64"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:01:05 crc kubenswrapper[4792]: I1202 19:01:05.795288 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ba4a311-3a7b-4575-b183-12bb721e71a1-kube-api-access-wqhpg" (OuterVolumeSpecName: "kube-api-access-wqhpg") pod "7ba4a311-3a7b-4575-b183-12bb721e71a1" (UID: "7ba4a311-3a7b-4575-b183-12bb721e71a1"). InnerVolumeSpecName "kube-api-access-wqhpg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:01:05 crc kubenswrapper[4792]: I1202 19:01:05.795914 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/625232de-fdb5-4851-92e2-3182287f2a64-scripts" (OuterVolumeSpecName: "scripts") pod "625232de-fdb5-4851-92e2-3182287f2a64" (UID: "625232de-fdb5-4851-92e2-3182287f2a64"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:01:05 crc kubenswrapper[4792]: I1202 19:01:05.797891 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ba4a311-3a7b-4575-b183-12bb721e71a1-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "7ba4a311-3a7b-4575-b183-12bb721e71a1" (UID: "7ba4a311-3a7b-4575-b183-12bb721e71a1"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:01:05 crc kubenswrapper[4792]: I1202 19:01:05.806311 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/625232de-fdb5-4851-92e2-3182287f2a64-kube-api-access-w7fhn" (OuterVolumeSpecName: "kube-api-access-w7fhn") pod "625232de-fdb5-4851-92e2-3182287f2a64" (UID: "625232de-fdb5-4851-92e2-3182287f2a64"). InnerVolumeSpecName "kube-api-access-w7fhn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:01:05 crc kubenswrapper[4792]: I1202 19:01:05.837512 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ba4a311-3a7b-4575-b183-12bb721e71a1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7ba4a311-3a7b-4575-b183-12bb721e71a1" (UID: "7ba4a311-3a7b-4575-b183-12bb721e71a1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:01:05 crc kubenswrapper[4792]: I1202 19:01:05.872705 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/625232de-fdb5-4851-92e2-3182287f2a64-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "625232de-fdb5-4851-92e2-3182287f2a64" (UID: "625232de-fdb5-4851-92e2-3182287f2a64"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:01:05 crc kubenswrapper[4792]: I1202 19:01:05.891979 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7fhn\" (UniqueName: \"kubernetes.io/projected/625232de-fdb5-4851-92e2-3182287f2a64-kube-api-access-w7fhn\") on node \"crc\" DevicePath \"\"" Dec 02 19:01:05 crc kubenswrapper[4792]: I1202 19:01:05.892007 4792 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/625232de-fdb5-4851-92e2-3182287f2a64-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 02 19:01:05 crc kubenswrapper[4792]: I1202 19:01:05.892016 4792 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7ba4a311-3a7b-4575-b183-12bb721e71a1-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 02 19:01:05 crc kubenswrapper[4792]: I1202 19:01:05.892024 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/625232de-fdb5-4851-92e2-3182287f2a64-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 19:01:05 crc kubenswrapper[4792]: I1202 19:01:05.892032 4792 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/625232de-fdb5-4851-92e2-3182287f2a64-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 19:01:05 crc kubenswrapper[4792]: I1202 19:01:05.892040 4792 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/625232de-fdb5-4851-92e2-3182287f2a64-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 19:01:05 crc kubenswrapper[4792]: I1202 19:01:05.892074 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wqhpg\" (UniqueName: \"kubernetes.io/projected/7ba4a311-3a7b-4575-b183-12bb721e71a1-kube-api-access-wqhpg\") on node \"crc\" DevicePath \"\"" Dec 02 19:01:05 crc kubenswrapper[4792]: I1202 19:01:05.892082 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ba4a311-3a7b-4575-b183-12bb721e71a1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 19:01:05 crc kubenswrapper[4792]: I1202 19:01:05.896288 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ba4a311-3a7b-4575-b183-12bb721e71a1-config-data" (OuterVolumeSpecName: "config-data") pod "7ba4a311-3a7b-4575-b183-12bb721e71a1" (UID: "7ba4a311-3a7b-4575-b183-12bb721e71a1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:01:05 crc kubenswrapper[4792]: I1202 19:01:05.899557 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/625232de-fdb5-4851-92e2-3182287f2a64-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "625232de-fdb5-4851-92e2-3182287f2a64" (UID: "625232de-fdb5-4851-92e2-3182287f2a64"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:01:05 crc kubenswrapper[4792]: I1202 19:01:05.928273 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/625232de-fdb5-4851-92e2-3182287f2a64-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "625232de-fdb5-4851-92e2-3182287f2a64" (UID: "625232de-fdb5-4851-92e2-3182287f2a64"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:01:05 crc kubenswrapper[4792]: I1202 19:01:05.957906 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/625232de-fdb5-4851-92e2-3182287f2a64-config-data" (OuterVolumeSpecName: "config-data") pod "625232de-fdb5-4851-92e2-3182287f2a64" (UID: "625232de-fdb5-4851-92e2-3182287f2a64"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:01:05 crc kubenswrapper[4792]: I1202 19:01:05.993639 4792 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/625232de-fdb5-4851-92e2-3182287f2a64-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 19:01:05 crc kubenswrapper[4792]: I1202 19:01:05.993667 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/625232de-fdb5-4851-92e2-3182287f2a64-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 19:01:05 crc kubenswrapper[4792]: I1202 19:01:05.993677 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ba4a311-3a7b-4575-b183-12bb721e71a1-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 19:01:05 crc kubenswrapper[4792]: I1202 19:01:05.993686 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/625232de-fdb5-4851-92e2-3182287f2a64-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.025009 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"625232de-fdb5-4851-92e2-3182287f2a64","Type":"ContainerDied","Data":"1ff268cccfbf71e18edfa0825553f0695ca5c3891c75b34a7b04c45c35dddf65"} Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.025098 4792 scope.go:117] "RemoveContainer" containerID="181de506876ddb9f887d28700c019861ff4094fbd24c90e9ff178ac0635d9586" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.025146 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.026543 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29411701-8bqdq" event={"ID":"7ba4a311-3a7b-4575-b183-12bb721e71a1","Type":"ContainerDied","Data":"5688a51ab97ef2012440f48d4a11621642ca7e62708c0d00114bd1439e7fdd61"} Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.026572 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5688a51ab97ef2012440f48d4a11621642ca7e62708c0d00114bd1439e7fdd61" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.026644 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29411701-8bqdq" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.047224 4792 scope.go:117] "RemoveContainer" containerID="9c7c897131852cb7c188fca2e77d6f1e31fb2090a0c5cf1298c4f016b5bb91d6" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.075998 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.082449 4792 scope.go:117] "RemoveContainer" containerID="472be3090099e2f2ba402772a2c51ecd9792486cedc708413e96b8bc66b31e19" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.087621 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.109706 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 02 19:01:06 crc kubenswrapper[4792]: E1202 19:01:06.110270 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="625232de-fdb5-4851-92e2-3182287f2a64" containerName="ceilometer-central-agent" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.110291 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="625232de-fdb5-4851-92e2-3182287f2a64" containerName="ceilometer-central-agent" Dec 02 19:01:06 crc kubenswrapper[4792]: E1202 19:01:06.110332 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ba4a311-3a7b-4575-b183-12bb721e71a1" containerName="keystone-cron" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.110341 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ba4a311-3a7b-4575-b183-12bb721e71a1" containerName="keystone-cron" Dec 02 19:01:06 crc kubenswrapper[4792]: E1202 19:01:06.110359 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="625232de-fdb5-4851-92e2-3182287f2a64" containerName="proxy-httpd" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.110367 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="625232de-fdb5-4851-92e2-3182287f2a64" containerName="proxy-httpd" Dec 02 19:01:06 crc kubenswrapper[4792]: E1202 19:01:06.110391 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="625232de-fdb5-4851-92e2-3182287f2a64" containerName="ceilometer-notification-agent" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.110400 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="625232de-fdb5-4851-92e2-3182287f2a64" containerName="ceilometer-notification-agent" Dec 02 19:01:06 crc kubenswrapper[4792]: E1202 19:01:06.110419 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="625232de-fdb5-4851-92e2-3182287f2a64" containerName="sg-core" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.110428 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="625232de-fdb5-4851-92e2-3182287f2a64" containerName="sg-core" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.110671 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="625232de-fdb5-4851-92e2-3182287f2a64" containerName="proxy-httpd" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.110714 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ba4a311-3a7b-4575-b183-12bb721e71a1" containerName="keystone-cron" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.110728 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="625232de-fdb5-4851-92e2-3182287f2a64" containerName="ceilometer-notification-agent" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.110740 
4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="625232de-fdb5-4851-92e2-3182287f2a64" containerName="sg-core" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.110756 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="625232de-fdb5-4851-92e2-3182287f2a64" containerName="ceilometer-central-agent" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.113253 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.113689 4792 scope.go:117] "RemoveContainer" containerID="a0ee2d76b6ecdb541890a8c72f66a46843ea5e1b2d0000a3846ff8a74dd30adc" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.117780 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.118011 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.118032 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.118127 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.197809 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/592ba145-17cf-4929-ad52-a324bd87fe0a-config-data\") pod \"ceilometer-0\" (UID: \"592ba145-17cf-4929-ad52-a324bd87fe0a\") " pod="openstack/ceilometer-0" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.197878 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/592ba145-17cf-4929-ad52-a324bd87fe0a-log-httpd\") pod \"ceilometer-0\" (UID: \"592ba145-17cf-4929-ad52-a324bd87fe0a\") " pod="openstack/ceilometer-0" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.197927 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/592ba145-17cf-4929-ad52-a324bd87fe0a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"592ba145-17cf-4929-ad52-a324bd87fe0a\") " pod="openstack/ceilometer-0" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.197953 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/592ba145-17cf-4929-ad52-a324bd87fe0a-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"592ba145-17cf-4929-ad52-a324bd87fe0a\") " pod="openstack/ceilometer-0" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.198054 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c2k5v\" (UniqueName: \"kubernetes.io/projected/592ba145-17cf-4929-ad52-a324bd87fe0a-kube-api-access-c2k5v\") pod \"ceilometer-0\" (UID: \"592ba145-17cf-4929-ad52-a324bd87fe0a\") " pod="openstack/ceilometer-0" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.198119 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/592ba145-17cf-4929-ad52-a324bd87fe0a-scripts\") pod \"ceilometer-0\" (UID: 
\"592ba145-17cf-4929-ad52-a324bd87fe0a\") " pod="openstack/ceilometer-0" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.198151 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/592ba145-17cf-4929-ad52-a324bd87fe0a-run-httpd\") pod \"ceilometer-0\" (UID: \"592ba145-17cf-4929-ad52-a324bd87fe0a\") " pod="openstack/ceilometer-0" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.198459 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/592ba145-17cf-4929-ad52-a324bd87fe0a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"592ba145-17cf-4929-ad52-a324bd87fe0a\") " pod="openstack/ceilometer-0" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.300308 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/592ba145-17cf-4929-ad52-a324bd87fe0a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"592ba145-17cf-4929-ad52-a324bd87fe0a\") " pod="openstack/ceilometer-0" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.300630 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/592ba145-17cf-4929-ad52-a324bd87fe0a-config-data\") pod \"ceilometer-0\" (UID: \"592ba145-17cf-4929-ad52-a324bd87fe0a\") " pod="openstack/ceilometer-0" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.300670 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/592ba145-17cf-4929-ad52-a324bd87fe0a-log-httpd\") pod \"ceilometer-0\" (UID: \"592ba145-17cf-4929-ad52-a324bd87fe0a\") " pod="openstack/ceilometer-0" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.300709 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/592ba145-17cf-4929-ad52-a324bd87fe0a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"592ba145-17cf-4929-ad52-a324bd87fe0a\") " pod="openstack/ceilometer-0" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.300730 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/592ba145-17cf-4929-ad52-a324bd87fe0a-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"592ba145-17cf-4929-ad52-a324bd87fe0a\") " pod="openstack/ceilometer-0" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.300755 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c2k5v\" (UniqueName: \"kubernetes.io/projected/592ba145-17cf-4929-ad52-a324bd87fe0a-kube-api-access-c2k5v\") pod \"ceilometer-0\" (UID: \"592ba145-17cf-4929-ad52-a324bd87fe0a\") " pod="openstack/ceilometer-0" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.300792 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/592ba145-17cf-4929-ad52-a324bd87fe0a-scripts\") pod \"ceilometer-0\" (UID: \"592ba145-17cf-4929-ad52-a324bd87fe0a\") " pod="openstack/ceilometer-0" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.300814 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/592ba145-17cf-4929-ad52-a324bd87fe0a-run-httpd\") pod \"ceilometer-0\" (UID: \"592ba145-17cf-4929-ad52-a324bd87fe0a\") " pod="openstack/ceilometer-0" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.301330 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/592ba145-17cf-4929-ad52-a324bd87fe0a-run-httpd\") pod \"ceilometer-0\" (UID: \"592ba145-17cf-4929-ad52-a324bd87fe0a\") " pod="openstack/ceilometer-0" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.303502 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/592ba145-17cf-4929-ad52-a324bd87fe0a-log-httpd\") pod \"ceilometer-0\" (UID: \"592ba145-17cf-4929-ad52-a324bd87fe0a\") " pod="openstack/ceilometer-0" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.306872 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/592ba145-17cf-4929-ad52-a324bd87fe0a-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"592ba145-17cf-4929-ad52-a324bd87fe0a\") " pod="openstack/ceilometer-0" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.307261 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/592ba145-17cf-4929-ad52-a324bd87fe0a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"592ba145-17cf-4929-ad52-a324bd87fe0a\") " pod="openstack/ceilometer-0" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.309649 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/592ba145-17cf-4929-ad52-a324bd87fe0a-config-data\") pod \"ceilometer-0\" (UID: \"592ba145-17cf-4929-ad52-a324bd87fe0a\") " pod="openstack/ceilometer-0" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.310206 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/592ba145-17cf-4929-ad52-a324bd87fe0a-scripts\") pod \"ceilometer-0\" (UID: \"592ba145-17cf-4929-ad52-a324bd87fe0a\") " pod="openstack/ceilometer-0" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.310288 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/592ba145-17cf-4929-ad52-a324bd87fe0a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"592ba145-17cf-4929-ad52-a324bd87fe0a\") " pod="openstack/ceilometer-0" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.322353 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c2k5v\" (UniqueName: \"kubernetes.io/projected/592ba145-17cf-4929-ad52-a324bd87fe0a-kube-api-access-c2k5v\") pod \"ceilometer-0\" (UID: \"592ba145-17cf-4929-ad52-a324bd87fe0a\") " pod="openstack/ceilometer-0" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.442070 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 19:01:06 crc kubenswrapper[4792]: I1202 19:01:06.962710 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 19:01:07 crc kubenswrapper[4792]: I1202 19:01:07.045861 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"592ba145-17cf-4929-ad52-a324bd87fe0a","Type":"ContainerStarted","Data":"7f7e427e7aa3a867188bda3f1c806cfad164da1a3a6197d04a138f089a30db4f"} Dec 02 19:01:07 crc kubenswrapper[4792]: I1202 19:01:07.052841 4792 generic.go:334] "Generic (PLEG): container finished" podID="794f62c5-7842-4616-aa8e-c968e23bc67d" containerID="8c0557f540807fc38260cb813f4ce2cdda3c8403de6ac4671676bb6bb72c69b3" exitCode=0 Dec 02 19:01:07 crc kubenswrapper[4792]: I1202 19:01:07.052883 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"794f62c5-7842-4616-aa8e-c968e23bc67d","Type":"ContainerDied","Data":"8c0557f540807fc38260cb813f4ce2cdda3c8403de6ac4671676bb6bb72c69b3"} Dec 02 19:01:07 crc kubenswrapper[4792]: I1202 19:01:07.159988 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 02 19:01:07 crc kubenswrapper[4792]: I1202 19:01:07.216195 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/794f62c5-7842-4616-aa8e-c968e23bc67d-config-data\") pod \"794f62c5-7842-4616-aa8e-c968e23bc67d\" (UID: \"794f62c5-7842-4616-aa8e-c968e23bc67d\") " Dec 02 19:01:07 crc kubenswrapper[4792]: I1202 19:01:07.216295 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/794f62c5-7842-4616-aa8e-c968e23bc67d-logs\") pod \"794f62c5-7842-4616-aa8e-c968e23bc67d\" (UID: \"794f62c5-7842-4616-aa8e-c968e23bc67d\") " Dec 02 19:01:07 crc kubenswrapper[4792]: I1202 19:01:07.216405 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6mqk6\" (UniqueName: \"kubernetes.io/projected/794f62c5-7842-4616-aa8e-c968e23bc67d-kube-api-access-6mqk6\") pod \"794f62c5-7842-4616-aa8e-c968e23bc67d\" (UID: \"794f62c5-7842-4616-aa8e-c968e23bc67d\") " Dec 02 19:01:07 crc kubenswrapper[4792]: I1202 19:01:07.216560 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/794f62c5-7842-4616-aa8e-c968e23bc67d-combined-ca-bundle\") pod \"794f62c5-7842-4616-aa8e-c968e23bc67d\" (UID: \"794f62c5-7842-4616-aa8e-c968e23bc67d\") " Dec 02 19:01:07 crc kubenswrapper[4792]: I1202 19:01:07.216791 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/794f62c5-7842-4616-aa8e-c968e23bc67d-logs" (OuterVolumeSpecName: "logs") pod "794f62c5-7842-4616-aa8e-c968e23bc67d" (UID: "794f62c5-7842-4616-aa8e-c968e23bc67d"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:01:07 crc kubenswrapper[4792]: I1202 19:01:07.217053 4792 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/794f62c5-7842-4616-aa8e-c968e23bc67d-logs\") on node \"crc\" DevicePath \"\"" Dec 02 19:01:07 crc kubenswrapper[4792]: I1202 19:01:07.223097 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/794f62c5-7842-4616-aa8e-c968e23bc67d-kube-api-access-6mqk6" (OuterVolumeSpecName: "kube-api-access-6mqk6") pod "794f62c5-7842-4616-aa8e-c968e23bc67d" (UID: "794f62c5-7842-4616-aa8e-c968e23bc67d"). InnerVolumeSpecName "kube-api-access-6mqk6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:01:07 crc kubenswrapper[4792]: I1202 19:01:07.256796 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/794f62c5-7842-4616-aa8e-c968e23bc67d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "794f62c5-7842-4616-aa8e-c968e23bc67d" (UID: "794f62c5-7842-4616-aa8e-c968e23bc67d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:01:07 crc kubenswrapper[4792]: I1202 19:01:07.276415 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/794f62c5-7842-4616-aa8e-c968e23bc67d-config-data" (OuterVolumeSpecName: "config-data") pod "794f62c5-7842-4616-aa8e-c968e23bc67d" (UID: "794f62c5-7842-4616-aa8e-c968e23bc67d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:01:07 crc kubenswrapper[4792]: I1202 19:01:07.318785 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6mqk6\" (UniqueName: \"kubernetes.io/projected/794f62c5-7842-4616-aa8e-c968e23bc67d-kube-api-access-6mqk6\") on node \"crc\" DevicePath \"\"" Dec 02 19:01:07 crc kubenswrapper[4792]: I1202 19:01:07.318808 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/794f62c5-7842-4616-aa8e-c968e23bc67d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 19:01:07 crc kubenswrapper[4792]: I1202 19:01:07.318818 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/794f62c5-7842-4616-aa8e-c968e23bc67d-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 19:01:07 crc kubenswrapper[4792]: I1202 19:01:07.550411 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="625232de-fdb5-4851-92e2-3182287f2a64" path="/var/lib/kubelet/pods/625232de-fdb5-4851-92e2-3182287f2a64/volumes" Dec 02 19:01:07 crc kubenswrapper[4792]: I1202 19:01:07.585152 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Dec 02 19:01:07 crc kubenswrapper[4792]: I1202 19:01:07.610922 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.076076 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.078015 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"794f62c5-7842-4616-aa8e-c968e23bc67d","Type":"ContainerDied","Data":"de162f77aefbdc6bf9a16f6d1c249d102f0e6796647ab45bba1efb0503a79696"} Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.078083 4792 scope.go:117] "RemoveContainer" containerID="8c0557f540807fc38260cb813f4ce2cdda3c8403de6ac4671676bb6bb72c69b3" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.087174 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.087247 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.110967 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.132744 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.154066 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.180759 4792 scope.go:117] "RemoveContainer" containerID="f1d8db753027933f929de26c22a72e4fbd1e779d9a3517053268b5d9ff2b4c45" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.191136 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 02 19:01:08 crc kubenswrapper[4792]: E1202 19:01:08.191986 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="794f62c5-7842-4616-aa8e-c968e23bc67d" containerName="nova-api-api" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.192007 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="794f62c5-7842-4616-aa8e-c968e23bc67d" containerName="nova-api-api" Dec 02 19:01:08 crc kubenswrapper[4792]: E1202 19:01:08.192034 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="794f62c5-7842-4616-aa8e-c968e23bc67d" containerName="nova-api-log" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.192041 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="794f62c5-7842-4616-aa8e-c968e23bc67d" containerName="nova-api-log" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.192243 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="794f62c5-7842-4616-aa8e-c968e23bc67d" containerName="nova-api-log" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.192274 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="794f62c5-7842-4616-aa8e-c968e23bc67d" containerName="nova-api-api" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.193472 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.203767 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.204923 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.206540 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.242648 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.273604 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rm8zr\" (UniqueName: \"kubernetes.io/projected/0813b309-ff0c-4dc1-9ff0-66737db0dfe0-kube-api-access-rm8zr\") pod \"nova-api-0\" (UID: \"0813b309-ff0c-4dc1-9ff0-66737db0dfe0\") " pod="openstack/nova-api-0" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.273689 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0813b309-ff0c-4dc1-9ff0-66737db0dfe0-public-tls-certs\") pod \"nova-api-0\" (UID: \"0813b309-ff0c-4dc1-9ff0-66737db0dfe0\") " pod="openstack/nova-api-0" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.273721 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0813b309-ff0c-4dc1-9ff0-66737db0dfe0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0813b309-ff0c-4dc1-9ff0-66737db0dfe0\") " pod="openstack/nova-api-0" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.273752 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0813b309-ff0c-4dc1-9ff0-66737db0dfe0-internal-tls-certs\") pod \"nova-api-0\" (UID: \"0813b309-ff0c-4dc1-9ff0-66737db0dfe0\") " pod="openstack/nova-api-0" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.273805 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0813b309-ff0c-4dc1-9ff0-66737db0dfe0-logs\") pod \"nova-api-0\" (UID: \"0813b309-ff0c-4dc1-9ff0-66737db0dfe0\") " pod="openstack/nova-api-0" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.273849 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0813b309-ff0c-4dc1-9ff0-66737db0dfe0-config-data\") pod \"nova-api-0\" (UID: \"0813b309-ff0c-4dc1-9ff0-66737db0dfe0\") " pod="openstack/nova-api-0" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.350005 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-6nrq2"] Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.351340 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-6nrq2" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.354209 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.359137 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.368108 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-6nrq2"] Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.376786 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0813b309-ff0c-4dc1-9ff0-66737db0dfe0-public-tls-certs\") pod \"nova-api-0\" (UID: \"0813b309-ff0c-4dc1-9ff0-66737db0dfe0\") " pod="openstack/nova-api-0" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.376836 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0813b309-ff0c-4dc1-9ff0-66737db0dfe0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0813b309-ff0c-4dc1-9ff0-66737db0dfe0\") " pod="openstack/nova-api-0" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.376869 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0813b309-ff0c-4dc1-9ff0-66737db0dfe0-internal-tls-certs\") pod \"nova-api-0\" (UID: \"0813b309-ff0c-4dc1-9ff0-66737db0dfe0\") " pod="openstack/nova-api-0" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.376919 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0813b309-ff0c-4dc1-9ff0-66737db0dfe0-logs\") pod \"nova-api-0\" (UID: \"0813b309-ff0c-4dc1-9ff0-66737db0dfe0\") " pod="openstack/nova-api-0" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.376945 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0813b309-ff0c-4dc1-9ff0-66737db0dfe0-config-data\") pod \"nova-api-0\" (UID: \"0813b309-ff0c-4dc1-9ff0-66737db0dfe0\") " pod="openstack/nova-api-0" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.377002 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rm8zr\" (UniqueName: \"kubernetes.io/projected/0813b309-ff0c-4dc1-9ff0-66737db0dfe0-kube-api-access-rm8zr\") pod \"nova-api-0\" (UID: \"0813b309-ff0c-4dc1-9ff0-66737db0dfe0\") " pod="openstack/nova-api-0" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.377679 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0813b309-ff0c-4dc1-9ff0-66737db0dfe0-logs\") pod \"nova-api-0\" (UID: \"0813b309-ff0c-4dc1-9ff0-66737db0dfe0\") " pod="openstack/nova-api-0" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.381276 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0813b309-ff0c-4dc1-9ff0-66737db0dfe0-internal-tls-certs\") pod \"nova-api-0\" (UID: \"0813b309-ff0c-4dc1-9ff0-66737db0dfe0\") " pod="openstack/nova-api-0" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.381825 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/0813b309-ff0c-4dc1-9ff0-66737db0dfe0-public-tls-certs\") pod \"nova-api-0\" (UID: \"0813b309-ff0c-4dc1-9ff0-66737db0dfe0\") " pod="openstack/nova-api-0" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.382218 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0813b309-ff0c-4dc1-9ff0-66737db0dfe0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0813b309-ff0c-4dc1-9ff0-66737db0dfe0\") " pod="openstack/nova-api-0" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.384682 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0813b309-ff0c-4dc1-9ff0-66737db0dfe0-config-data\") pod \"nova-api-0\" (UID: \"0813b309-ff0c-4dc1-9ff0-66737db0dfe0\") " pod="openstack/nova-api-0" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.405400 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rm8zr\" (UniqueName: \"kubernetes.io/projected/0813b309-ff0c-4dc1-9ff0-66737db0dfe0-kube-api-access-rm8zr\") pod \"nova-api-0\" (UID: \"0813b309-ff0c-4dc1-9ff0-66737db0dfe0\") " pod="openstack/nova-api-0" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.477966 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914-scripts\") pod \"nova-cell1-cell-mapping-6nrq2\" (UID: \"25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914\") " pod="openstack/nova-cell1-cell-mapping-6nrq2" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.478054 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-6nrq2\" (UID: \"25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914\") " pod="openstack/nova-cell1-cell-mapping-6nrq2" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.478088 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmfbp\" (UniqueName: \"kubernetes.io/projected/25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914-kube-api-access-dmfbp\") pod \"nova-cell1-cell-mapping-6nrq2\" (UID: \"25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914\") " pod="openstack/nova-cell1-cell-mapping-6nrq2" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.478157 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914-config-data\") pod \"nova-cell1-cell-mapping-6nrq2\" (UID: \"25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914\") " pod="openstack/nova-cell1-cell-mapping-6nrq2" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.532734 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.580074 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914-config-data\") pod \"nova-cell1-cell-mapping-6nrq2\" (UID: \"25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914\") " pod="openstack/nova-cell1-cell-mapping-6nrq2" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.581168 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914-scripts\") pod \"nova-cell1-cell-mapping-6nrq2\" (UID: \"25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914\") " pod="openstack/nova-cell1-cell-mapping-6nrq2" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.581299 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-6nrq2\" (UID: \"25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914\") " pod="openstack/nova-cell1-cell-mapping-6nrq2" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.581345 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmfbp\" (UniqueName: \"kubernetes.io/projected/25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914-kube-api-access-dmfbp\") pod \"nova-cell1-cell-mapping-6nrq2\" (UID: \"25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914\") " pod="openstack/nova-cell1-cell-mapping-6nrq2" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.586317 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-6nrq2\" (UID: \"25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914\") " pod="openstack/nova-cell1-cell-mapping-6nrq2" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.587563 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914-config-data\") pod \"nova-cell1-cell-mapping-6nrq2\" (UID: \"25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914\") " pod="openstack/nova-cell1-cell-mapping-6nrq2" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.588625 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914-scripts\") pod \"nova-cell1-cell-mapping-6nrq2\" (UID: \"25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914\") " pod="openstack/nova-cell1-cell-mapping-6nrq2" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.605049 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmfbp\" (UniqueName: \"kubernetes.io/projected/25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914-kube-api-access-dmfbp\") pod \"nova-cell1-cell-mapping-6nrq2\" (UID: \"25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914\") " pod="openstack/nova-cell1-cell-mapping-6nrq2" Dec 02 19:01:08 crc kubenswrapper[4792]: I1202 19:01:08.679509 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-6nrq2" Dec 02 19:01:09 crc kubenswrapper[4792]: W1202 19:01:09.069633 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0813b309_ff0c_4dc1_9ff0_66737db0dfe0.slice/crio-d96e176ee1da0f75ecf709da33049b38eec5fd958073525ac4593014570022aa WatchSource:0}: Error finding container d96e176ee1da0f75ecf709da33049b38eec5fd958073525ac4593014570022aa: Status 404 returned error can't find the container with id d96e176ee1da0f75ecf709da33049b38eec5fd958073525ac4593014570022aa Dec 02 19:01:09 crc kubenswrapper[4792]: I1202 19:01:09.074669 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 02 19:01:09 crc kubenswrapper[4792]: I1202 19:01:09.090746 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0813b309-ff0c-4dc1-9ff0-66737db0dfe0","Type":"ContainerStarted","Data":"d96e176ee1da0f75ecf709da33049b38eec5fd958073525ac4593014570022aa"} Dec 02 19:01:09 crc kubenswrapper[4792]: I1202 19:01:09.097085 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"592ba145-17cf-4929-ad52-a324bd87fe0a","Type":"ContainerStarted","Data":"903682506d1219b834fcdcbfc534d747e1fb47d76b7d588807598497b8dde7bb"} Dec 02 19:01:09 crc kubenswrapper[4792]: I1202 19:01:09.097118 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"592ba145-17cf-4929-ad52-a324bd87fe0a","Type":"ContainerStarted","Data":"70363146d7872bb5e3f6ff7639671d03e363a4f84a59b58b8b0e742324e40560"} Dec 02 19:01:09 crc kubenswrapper[4792]: I1202 19:01:09.274292 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-6nrq2"] Dec 02 19:01:09 crc kubenswrapper[4792]: I1202 19:01:09.556239 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="794f62c5-7842-4616-aa8e-c968e23bc67d" path="/var/lib/kubelet/pods/794f62c5-7842-4616-aa8e-c968e23bc67d/volumes" Dec 02 19:01:10 crc kubenswrapper[4792]: I1202 19:01:10.110233 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-6nrq2" event={"ID":"25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914","Type":"ContainerStarted","Data":"14faba9f1d40d34af783d176eeccdc6188d9f4eae94ea9eebb44eb873cfae59d"} Dec 02 19:01:10 crc kubenswrapper[4792]: I1202 19:01:10.110608 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-6nrq2" event={"ID":"25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914","Type":"ContainerStarted","Data":"8f216d86d0287ab6f4f3716379f7109eaa2738edf422665bf7f93e76d29d712b"} Dec 02 19:01:10 crc kubenswrapper[4792]: I1202 19:01:10.118403 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"592ba145-17cf-4929-ad52-a324bd87fe0a","Type":"ContainerStarted","Data":"22d608f98046c7321e11f65e21e6a3cd1b95dbcab69bf214abafd6aacf9991a0"} Dec 02 19:01:10 crc kubenswrapper[4792]: I1202 19:01:10.120262 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0813b309-ff0c-4dc1-9ff0-66737db0dfe0","Type":"ContainerStarted","Data":"6e775b8b39899f5e16ecf758a41f2271664854b1774c70ffc1a54dd6044f14f9"} Dec 02 19:01:10 crc kubenswrapper[4792]: I1202 19:01:10.120301 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"0813b309-ff0c-4dc1-9ff0-66737db0dfe0","Type":"ContainerStarted","Data":"df146e3f72276a8f5068fc7009e24cbe1bbecb109e16ccd31091bbc81f3f06b6"} Dec 02 19:01:10 crc kubenswrapper[4792]: I1202 19:01:10.130665 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-6nrq2" podStartSLOduration=2.130642976 podStartE2EDuration="2.130642976s" podCreationTimestamp="2025-12-02 19:01:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 19:01:10.128443469 +0000 UTC m=+1500.901335837" watchObservedRunningTime="2025-12-02 19:01:10.130642976 +0000 UTC m=+1500.903535304" Dec 02 19:01:10 crc kubenswrapper[4792]: I1202 19:01:10.169294 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.169273502 podStartE2EDuration="2.169273502s" podCreationTimestamp="2025-12-02 19:01:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 19:01:10.160901714 +0000 UTC m=+1500.933794082" watchObservedRunningTime="2025-12-02 19:01:10.169273502 +0000 UTC m=+1500.942165840" Dec 02 19:01:10 crc kubenswrapper[4792]: I1202 19:01:10.437789 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5fd9b586ff-w5ctg" Dec 02 19:01:10 crc kubenswrapper[4792]: I1202 19:01:10.515665 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78cd565959-lftp9"] Dec 02 19:01:10 crc kubenswrapper[4792]: I1202 19:01:10.516119 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-78cd565959-lftp9" podUID="92187bf7-8ae3-4a53-9e20-f1d31f44f0d0" containerName="dnsmasq-dns" containerID="cri-o://c033e5f4a3c35edec4a7596610fee866738e841d71ea312bd5fb797f51f7a2e0" gracePeriod=10 Dec 02 19:01:11 crc kubenswrapper[4792]: I1202 19:01:11.170028 4792 generic.go:334] "Generic (PLEG): container finished" podID="92187bf7-8ae3-4a53-9e20-f1d31f44f0d0" containerID="c033e5f4a3c35edec4a7596610fee866738e841d71ea312bd5fb797f51f7a2e0" exitCode=0 Dec 02 19:01:11 crc kubenswrapper[4792]: I1202 19:01:11.171313 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78cd565959-lftp9" event={"ID":"92187bf7-8ae3-4a53-9e20-f1d31f44f0d0","Type":"ContainerDied","Data":"c033e5f4a3c35edec4a7596610fee866738e841d71ea312bd5fb797f51f7a2e0"} Dec 02 19:01:11 crc kubenswrapper[4792]: I1202 19:01:11.171343 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78cd565959-lftp9" event={"ID":"92187bf7-8ae3-4a53-9e20-f1d31f44f0d0","Type":"ContainerDied","Data":"83a5343cccc538286f4eb6e56d5f40919a26f396f534e68ec8e991597dfd7065"} Dec 02 19:01:11 crc kubenswrapper[4792]: I1202 19:01:11.171354 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="83a5343cccc538286f4eb6e56d5f40919a26f396f534e68ec8e991597dfd7065" Dec 02 19:01:11 crc kubenswrapper[4792]: I1202 19:01:11.231928 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78cd565959-lftp9" Dec 02 19:01:11 crc kubenswrapper[4792]: I1202 19:01:11.366898 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/92187bf7-8ae3-4a53-9e20-f1d31f44f0d0-dns-swift-storage-0\") pod \"92187bf7-8ae3-4a53-9e20-f1d31f44f0d0\" (UID: \"92187bf7-8ae3-4a53-9e20-f1d31f44f0d0\") " Dec 02 19:01:11 crc kubenswrapper[4792]: I1202 19:01:11.366988 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/92187bf7-8ae3-4a53-9e20-f1d31f44f0d0-ovsdbserver-sb\") pod \"92187bf7-8ae3-4a53-9e20-f1d31f44f0d0\" (UID: \"92187bf7-8ae3-4a53-9e20-f1d31f44f0d0\") " Dec 02 19:01:11 crc kubenswrapper[4792]: I1202 19:01:11.367018 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92187bf7-8ae3-4a53-9e20-f1d31f44f0d0-config\") pod \"92187bf7-8ae3-4a53-9e20-f1d31f44f0d0\" (UID: \"92187bf7-8ae3-4a53-9e20-f1d31f44f0d0\") " Dec 02 19:01:11 crc kubenswrapper[4792]: I1202 19:01:11.367060 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/92187bf7-8ae3-4a53-9e20-f1d31f44f0d0-ovsdbserver-nb\") pod \"92187bf7-8ae3-4a53-9e20-f1d31f44f0d0\" (UID: \"92187bf7-8ae3-4a53-9e20-f1d31f44f0d0\") " Dec 02 19:01:11 crc kubenswrapper[4792]: I1202 19:01:11.367160 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/92187bf7-8ae3-4a53-9e20-f1d31f44f0d0-dns-svc\") pod \"92187bf7-8ae3-4a53-9e20-f1d31f44f0d0\" (UID: \"92187bf7-8ae3-4a53-9e20-f1d31f44f0d0\") " Dec 02 19:01:11 crc kubenswrapper[4792]: I1202 19:01:11.367219 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hzphp\" (UniqueName: \"kubernetes.io/projected/92187bf7-8ae3-4a53-9e20-f1d31f44f0d0-kube-api-access-hzphp\") pod \"92187bf7-8ae3-4a53-9e20-f1d31f44f0d0\" (UID: \"92187bf7-8ae3-4a53-9e20-f1d31f44f0d0\") " Dec 02 19:01:11 crc kubenswrapper[4792]: I1202 19:01:11.370755 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92187bf7-8ae3-4a53-9e20-f1d31f44f0d0-kube-api-access-hzphp" (OuterVolumeSpecName: "kube-api-access-hzphp") pod "92187bf7-8ae3-4a53-9e20-f1d31f44f0d0" (UID: "92187bf7-8ae3-4a53-9e20-f1d31f44f0d0"). InnerVolumeSpecName "kube-api-access-hzphp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:01:11 crc kubenswrapper[4792]: I1202 19:01:11.422855 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/92187bf7-8ae3-4a53-9e20-f1d31f44f0d0-config" (OuterVolumeSpecName: "config") pod "92187bf7-8ae3-4a53-9e20-f1d31f44f0d0" (UID: "92187bf7-8ae3-4a53-9e20-f1d31f44f0d0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 19:01:11 crc kubenswrapper[4792]: I1202 19:01:11.422926 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/92187bf7-8ae3-4a53-9e20-f1d31f44f0d0-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "92187bf7-8ae3-4a53-9e20-f1d31f44f0d0" (UID: "92187bf7-8ae3-4a53-9e20-f1d31f44f0d0"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 19:01:11 crc kubenswrapper[4792]: I1202 19:01:11.424074 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/92187bf7-8ae3-4a53-9e20-f1d31f44f0d0-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "92187bf7-8ae3-4a53-9e20-f1d31f44f0d0" (UID: "92187bf7-8ae3-4a53-9e20-f1d31f44f0d0"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 19:01:11 crc kubenswrapper[4792]: I1202 19:01:11.429567 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/92187bf7-8ae3-4a53-9e20-f1d31f44f0d0-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "92187bf7-8ae3-4a53-9e20-f1d31f44f0d0" (UID: "92187bf7-8ae3-4a53-9e20-f1d31f44f0d0"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 19:01:11 crc kubenswrapper[4792]: I1202 19:01:11.441706 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/92187bf7-8ae3-4a53-9e20-f1d31f44f0d0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "92187bf7-8ae3-4a53-9e20-f1d31f44f0d0" (UID: "92187bf7-8ae3-4a53-9e20-f1d31f44f0d0"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 19:01:11 crc kubenswrapper[4792]: I1202 19:01:11.470091 4792 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/92187bf7-8ae3-4a53-9e20-f1d31f44f0d0-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 02 19:01:11 crc kubenswrapper[4792]: I1202 19:01:11.470128 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/92187bf7-8ae3-4a53-9e20-f1d31f44f0d0-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 02 19:01:11 crc kubenswrapper[4792]: I1202 19:01:11.470145 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92187bf7-8ae3-4a53-9e20-f1d31f44f0d0-config\") on node \"crc\" DevicePath \"\"" Dec 02 19:01:11 crc kubenswrapper[4792]: I1202 19:01:11.470156 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/92187bf7-8ae3-4a53-9e20-f1d31f44f0d0-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 02 19:01:11 crc kubenswrapper[4792]: I1202 19:01:11.470166 4792 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/92187bf7-8ae3-4a53-9e20-f1d31f44f0d0-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 19:01:11 crc kubenswrapper[4792]: I1202 19:01:11.470174 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hzphp\" (UniqueName: \"kubernetes.io/projected/92187bf7-8ae3-4a53-9e20-f1d31f44f0d0-kube-api-access-hzphp\") on node \"crc\" DevicePath \"\"" Dec 02 19:01:12 crc kubenswrapper[4792]: I1202 19:01:12.181489 4792 util.go:48] "No ready sandbox for pod can be found. 
Dec 02 19:01:12 crc kubenswrapper[4792]: I1202 19:01:12.181501 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"592ba145-17cf-4929-ad52-a324bd87fe0a","Type":"ContainerStarted","Data":"afdb78c76a85146d971da39dd995b42e691b0b459700c2087581bd1b0dd91538"}
Dec 02 19:01:12 crc kubenswrapper[4792]: I1202 19:01:12.210315 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.149379201 podStartE2EDuration="6.210293961s" podCreationTimestamp="2025-12-02 19:01:06 +0000 UTC" firstStartedPulling="2025-12-02 19:01:06.978881897 +0000 UTC m=+1497.751774225" lastFinishedPulling="2025-12-02 19:01:11.039796647 +0000 UTC m=+1501.812688985" observedRunningTime="2025-12-02 19:01:12.199083879 +0000 UTC m=+1502.971976227" watchObservedRunningTime="2025-12-02 19:01:12.210293961 +0000 UTC m=+1502.983186299"
Dec 02 19:01:12 crc kubenswrapper[4792]: I1202 19:01:12.225932 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78cd565959-lftp9"]
Dec 02 19:01:12 crc kubenswrapper[4792]: I1202 19:01:12.239649 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78cd565959-lftp9"]
Dec 02 19:01:13 crc kubenswrapper[4792]: I1202 19:01:13.213139 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Dec 02 19:01:13 crc kubenswrapper[4792]: I1202 19:01:13.551515 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="92187bf7-8ae3-4a53-9e20-f1d31f44f0d0" path="/var/lib/kubelet/pods/92187bf7-8ae3-4a53-9e20-f1d31f44f0d0/volumes"
Dec 02 19:01:14 crc kubenswrapper[4792]: I1202 19:01:14.227548 4792 generic.go:334] "Generic (PLEG): container finished" podID="25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914" containerID="14faba9f1d40d34af783d176eeccdc6188d9f4eae94ea9eebb44eb873cfae59d" exitCode=0
Dec 02 19:01:14 crc kubenswrapper[4792]: I1202 19:01:14.228134 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-6nrq2" event={"ID":"25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914","Type":"ContainerDied","Data":"14faba9f1d40d34af783d176eeccdc6188d9f4eae94ea9eebb44eb873cfae59d"}
Dec 02 19:01:15 crc kubenswrapper[4792]: I1202 19:01:15.661336 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-6nrq2"
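
The ceilometer-0 latency entry above is internally consistent: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp (19:01:12.210293961 − 19:01:06 ≈ 6.2103 s), and podStartSLOduration subtracts the image-pull window (19:01:11.039796647 − 19:01:06.978881897 ≈ 4.0609 s), giving ≈ 2.1494 s. The tracker appears to use the monotonic readings (the m=+… offsets), so wall-clock arithmetic lands within a few tens of nanoseconds of the logged value. A small Go sketch reproducing the arithmetic from the timestamps in the entry:

    package main

    import (
    	"fmt"
    	"time"
    )

    func main() {
    	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
    	parse := func(s string) time.Time {
    		t, err := time.Parse(layout, s)
    		if err != nil {
    			panic(err)
    		}
    		return t
    	}
    	created := parse("2025-12-02 19:01:06 +0000 UTC")
    	pullStart := parse("2025-12-02 19:01:06.978881897 +0000 UTC")
    	pullEnd := parse("2025-12-02 19:01:11.039796647 +0000 UTC")
    	running := parse("2025-12-02 19:01:12.210293961 +0000 UTC") // watchObservedRunningTime

    	e2e := running.Sub(created)         // podStartE2EDuration, ~6.210293961s
    	slo := e2e - pullEnd.Sub(pullStart) // E2E minus image-pull time, ~2.1494s
    	fmt.Println(e2e, slo)
    }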
Dec 02 19:01:15 crc kubenswrapper[4792]: I1202 19:01:15.731577 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-78cd565959-lftp9" podUID="92187bf7-8ae3-4a53-9e20-f1d31f44f0d0" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.214:5353: i/o timeout"
Dec 02 19:01:15 crc kubenswrapper[4792]: I1202 19:01:15.762377 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914-combined-ca-bundle\") pod \"25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914\" (UID: \"25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914\") "
Dec 02 19:01:15 crc kubenswrapper[4792]: I1202 19:01:15.762773 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914-config-data\") pod \"25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914\" (UID: \"25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914\") "
Dec 02 19:01:15 crc kubenswrapper[4792]: I1202 19:01:15.762941 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dmfbp\" (UniqueName: \"kubernetes.io/projected/25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914-kube-api-access-dmfbp\") pod \"25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914\" (UID: \"25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914\") "
Dec 02 19:01:15 crc kubenswrapper[4792]: I1202 19:01:15.763450 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914-scripts\") pod \"25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914\" (UID: \"25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914\") "
Dec 02 19:01:15 crc kubenswrapper[4792]: I1202 19:01:15.770700 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914-scripts" (OuterVolumeSpecName: "scripts") pod "25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914" (UID: "25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 19:01:15 crc kubenswrapper[4792]: I1202 19:01:15.771028 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914-kube-api-access-dmfbp" (OuterVolumeSpecName: "kube-api-access-dmfbp") pod "25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914" (UID: "25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914"). InnerVolumeSpecName "kube-api-access-dmfbp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 19:01:15 crc kubenswrapper[4792]: I1202 19:01:15.795640 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914-config-data" (OuterVolumeSpecName: "config-data") pod "25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914" (UID: "25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 19:01:15 crc kubenswrapper[4792]: I1202 19:01:15.804438 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914" (UID: "25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
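
The dnsmasq readiness probe above fails with "dial tcp 10.217.0.214:5353: i/o timeout" because the pod's sandbox is already being torn down: the probe is a plain TCP connect against the pod IP. A standalone sketch of the equivalent check (address taken from the probe output; this is illustrative, not the kubelet's prober code):

    package main

    import (
    	"fmt"
    	"net"
    	"time"
    )

    func main() {
    	// Pod IP and port from the probe output above; unreachable once the pod is gone.
    	conn, err := net.DialTimeout("tcp", "10.217.0.214:5353", 1*time.Second)
    	if err != nil {
    		fmt.Println("probe failed:", err) // e.g. "dial tcp 10.217.0.214:5353: i/o timeout"
    		return
    	}
    	conn.Close()
    	fmt.Println("probe ok")
    }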
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:01:15 crc kubenswrapper[4792]: I1202 19:01:15.865908 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 19:01:15 crc kubenswrapper[4792]: I1202 19:01:15.866093 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 19:01:15 crc kubenswrapper[4792]: I1202 19:01:15.866155 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 19:01:15 crc kubenswrapper[4792]: I1202 19:01:15.866214 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dmfbp\" (UniqueName: \"kubernetes.io/projected/25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914-kube-api-access-dmfbp\") on node \"crc\" DevicePath \"\"" Dec 02 19:01:16 crc kubenswrapper[4792]: I1202 19:01:16.255630 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-6nrq2" event={"ID":"25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914","Type":"ContainerDied","Data":"8f216d86d0287ab6f4f3716379f7109eaa2738edf422665bf7f93e76d29d712b"} Dec 02 19:01:16 crc kubenswrapper[4792]: I1202 19:01:16.255700 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8f216d86d0287ab6f4f3716379f7109eaa2738edf422665bf7f93e76d29d712b" Dec 02 19:01:16 crc kubenswrapper[4792]: I1202 19:01:16.255751 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-6nrq2" Dec 02 19:01:16 crc kubenswrapper[4792]: I1202 19:01:16.448038 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 02 19:01:16 crc kubenswrapper[4792]: I1202 19:01:16.448273 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="0813b309-ff0c-4dc1-9ff0-66737db0dfe0" containerName="nova-api-log" containerID="cri-o://df146e3f72276a8f5068fc7009e24cbe1bbecb109e16ccd31091bbc81f3f06b6" gracePeriod=30 Dec 02 19:01:16 crc kubenswrapper[4792]: I1202 19:01:16.448391 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="0813b309-ff0c-4dc1-9ff0-66737db0dfe0" containerName="nova-api-api" containerID="cri-o://6e775b8b39899f5e16ecf758a41f2271664854b1774c70ffc1a54dd6044f14f9" gracePeriod=30 Dec 02 19:01:16 crc kubenswrapper[4792]: I1202 19:01:16.478452 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 19:01:16 crc kubenswrapper[4792]: I1202 19:01:16.478703 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="89ec05ac-1237-4cfd-a36e-59bc6933890e" containerName="nova-scheduler-scheduler" containerID="cri-o://79d0389c50e6449e05d91d34bf7774e493d3817a1010ae7a83c05754f8cccf7a" gracePeriod=30 Dec 02 19:01:16 crc kubenswrapper[4792]: I1202 19:01:16.490130 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 19:01:16 crc kubenswrapper[4792]: I1202 19:01:16.490362 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="c128c153-5ac8-4dd5-9952-4ad8960e6525" 
containerName="nova-metadata-log" containerID="cri-o://0297cbe3a5372af31e88d534131030dd719530db04994c19293b49c1290996b2" gracePeriod=30 Dec 02 19:01:16 crc kubenswrapper[4792]: I1202 19:01:16.490415 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="c128c153-5ac8-4dd5-9952-4ad8960e6525" containerName="nova-metadata-metadata" containerID="cri-o://dbbdaad6e6a279a4b33b36c96d3239a799c7b80079a7f2bda17236e0cf483e37" gracePeriod=30 Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.133764 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.207461 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0813b309-ff0c-4dc1-9ff0-66737db0dfe0-public-tls-certs\") pod \"0813b309-ff0c-4dc1-9ff0-66737db0dfe0\" (UID: \"0813b309-ff0c-4dc1-9ff0-66737db0dfe0\") " Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.207581 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0813b309-ff0c-4dc1-9ff0-66737db0dfe0-internal-tls-certs\") pod \"0813b309-ff0c-4dc1-9ff0-66737db0dfe0\" (UID: \"0813b309-ff0c-4dc1-9ff0-66737db0dfe0\") " Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.207729 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0813b309-ff0c-4dc1-9ff0-66737db0dfe0-config-data\") pod \"0813b309-ff0c-4dc1-9ff0-66737db0dfe0\" (UID: \"0813b309-ff0c-4dc1-9ff0-66737db0dfe0\") " Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.207756 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0813b309-ff0c-4dc1-9ff0-66737db0dfe0-combined-ca-bundle\") pod \"0813b309-ff0c-4dc1-9ff0-66737db0dfe0\" (UID: \"0813b309-ff0c-4dc1-9ff0-66737db0dfe0\") " Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.207785 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rm8zr\" (UniqueName: \"kubernetes.io/projected/0813b309-ff0c-4dc1-9ff0-66737db0dfe0-kube-api-access-rm8zr\") pod \"0813b309-ff0c-4dc1-9ff0-66737db0dfe0\" (UID: \"0813b309-ff0c-4dc1-9ff0-66737db0dfe0\") " Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.207831 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0813b309-ff0c-4dc1-9ff0-66737db0dfe0-logs\") pod \"0813b309-ff0c-4dc1-9ff0-66737db0dfe0\" (UID: \"0813b309-ff0c-4dc1-9ff0-66737db0dfe0\") " Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.209225 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0813b309-ff0c-4dc1-9ff0-66737db0dfe0-logs" (OuterVolumeSpecName: "logs") pod "0813b309-ff0c-4dc1-9ff0-66737db0dfe0" (UID: "0813b309-ff0c-4dc1-9ff0-66737db0dfe0"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.223951 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0813b309-ff0c-4dc1-9ff0-66737db0dfe0-kube-api-access-rm8zr" (OuterVolumeSpecName: "kube-api-access-rm8zr") pod "0813b309-ff0c-4dc1-9ff0-66737db0dfe0" (UID: "0813b309-ff0c-4dc1-9ff0-66737db0dfe0"). InnerVolumeSpecName "kube-api-access-rm8zr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.240879 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0813b309-ff0c-4dc1-9ff0-66737db0dfe0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0813b309-ff0c-4dc1-9ff0-66737db0dfe0" (UID: "0813b309-ff0c-4dc1-9ff0-66737db0dfe0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.245002 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0813b309-ff0c-4dc1-9ff0-66737db0dfe0-config-data" (OuterVolumeSpecName: "config-data") pod "0813b309-ff0c-4dc1-9ff0-66737db0dfe0" (UID: "0813b309-ff0c-4dc1-9ff0-66737db0dfe0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.262456 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0813b309-ff0c-4dc1-9ff0-66737db0dfe0-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "0813b309-ff0c-4dc1-9ff0-66737db0dfe0" (UID: "0813b309-ff0c-4dc1-9ff0-66737db0dfe0"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.269986 4792 generic.go:334] "Generic (PLEG): container finished" podID="0813b309-ff0c-4dc1-9ff0-66737db0dfe0" containerID="6e775b8b39899f5e16ecf758a41f2271664854b1774c70ffc1a54dd6044f14f9" exitCode=0 Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.270014 4792 generic.go:334] "Generic (PLEG): container finished" podID="0813b309-ff0c-4dc1-9ff0-66737db0dfe0" containerID="df146e3f72276a8f5068fc7009e24cbe1bbecb109e16ccd31091bbc81f3f06b6" exitCode=143 Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.270068 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.270198 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0813b309-ff0c-4dc1-9ff0-66737db0dfe0","Type":"ContainerDied","Data":"6e775b8b39899f5e16ecf758a41f2271664854b1774c70ffc1a54dd6044f14f9"} Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.270306 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0813b309-ff0c-4dc1-9ff0-66737db0dfe0","Type":"ContainerDied","Data":"df146e3f72276a8f5068fc7009e24cbe1bbecb109e16ccd31091bbc81f3f06b6"} Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.270378 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0813b309-ff0c-4dc1-9ff0-66737db0dfe0","Type":"ContainerDied","Data":"d96e176ee1da0f75ecf709da33049b38eec5fd958073525ac4593014570022aa"} Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.270385 4792 scope.go:117] "RemoveContainer" containerID="6e775b8b39899f5e16ecf758a41f2271664854b1774c70ffc1a54dd6044f14f9" Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.273303 4792 generic.go:334] "Generic (PLEG): container finished" podID="c128c153-5ac8-4dd5-9952-4ad8960e6525" containerID="0297cbe3a5372af31e88d534131030dd719530db04994c19293b49c1290996b2" exitCode=143 Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.273336 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c128c153-5ac8-4dd5-9952-4ad8960e6525","Type":"ContainerDied","Data":"0297cbe3a5372af31e88d534131030dd719530db04994c19293b49c1290996b2"} Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.287745 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0813b309-ff0c-4dc1-9ff0-66737db0dfe0-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "0813b309-ff0c-4dc1-9ff0-66737db0dfe0" (UID: "0813b309-ff0c-4dc1-9ff0-66737db0dfe0"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.310104 4792 scope.go:117] "RemoveContainer" containerID="df146e3f72276a8f5068fc7009e24cbe1bbecb109e16ccd31091bbc81f3f06b6" Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.311014 4792 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0813b309-ff0c-4dc1-9ff0-66737db0dfe0-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.311039 4792 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0813b309-ff0c-4dc1-9ff0-66737db0dfe0-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.311049 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0813b309-ff0c-4dc1-9ff0-66737db0dfe0-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.311063 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0813b309-ff0c-4dc1-9ff0-66737db0dfe0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.311072 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rm8zr\" (UniqueName: \"kubernetes.io/projected/0813b309-ff0c-4dc1-9ff0-66737db0dfe0-kube-api-access-rm8zr\") on node \"crc\" DevicePath \"\"" Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.311080 4792 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0813b309-ff0c-4dc1-9ff0-66737db0dfe0-logs\") on node \"crc\" DevicePath \"\"" Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.328648 4792 scope.go:117] "RemoveContainer" containerID="6e775b8b39899f5e16ecf758a41f2271664854b1774c70ffc1a54dd6044f14f9" Dec 02 19:01:17 crc kubenswrapper[4792]: E1202 19:01:17.329110 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e775b8b39899f5e16ecf758a41f2271664854b1774c70ffc1a54dd6044f14f9\": container with ID starting with 6e775b8b39899f5e16ecf758a41f2271664854b1774c70ffc1a54dd6044f14f9 not found: ID does not exist" containerID="6e775b8b39899f5e16ecf758a41f2271664854b1774c70ffc1a54dd6044f14f9" Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.329143 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e775b8b39899f5e16ecf758a41f2271664854b1774c70ffc1a54dd6044f14f9"} err="failed to get container status \"6e775b8b39899f5e16ecf758a41f2271664854b1774c70ffc1a54dd6044f14f9\": rpc error: code = NotFound desc = could not find container \"6e775b8b39899f5e16ecf758a41f2271664854b1774c70ffc1a54dd6044f14f9\": container with ID starting with 6e775b8b39899f5e16ecf758a41f2271664854b1774c70ffc1a54dd6044f14f9 not found: ID does not exist" Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.329166 4792 scope.go:117] "RemoveContainer" containerID="df146e3f72276a8f5068fc7009e24cbe1bbecb109e16ccd31091bbc81f3f06b6" Dec 02 19:01:17 crc kubenswrapper[4792]: E1202 19:01:17.329997 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df146e3f72276a8f5068fc7009e24cbe1bbecb109e16ccd31091bbc81f3f06b6\": container with ID starting with 
df146e3f72276a8f5068fc7009e24cbe1bbecb109e16ccd31091bbc81f3f06b6 not found: ID does not exist" containerID="df146e3f72276a8f5068fc7009e24cbe1bbecb109e16ccd31091bbc81f3f06b6" Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.330098 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df146e3f72276a8f5068fc7009e24cbe1bbecb109e16ccd31091bbc81f3f06b6"} err="failed to get container status \"df146e3f72276a8f5068fc7009e24cbe1bbecb109e16ccd31091bbc81f3f06b6\": rpc error: code = NotFound desc = could not find container \"df146e3f72276a8f5068fc7009e24cbe1bbecb109e16ccd31091bbc81f3f06b6\": container with ID starting with df146e3f72276a8f5068fc7009e24cbe1bbecb109e16ccd31091bbc81f3f06b6 not found: ID does not exist" Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.330113 4792 scope.go:117] "RemoveContainer" containerID="6e775b8b39899f5e16ecf758a41f2271664854b1774c70ffc1a54dd6044f14f9" Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.330576 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e775b8b39899f5e16ecf758a41f2271664854b1774c70ffc1a54dd6044f14f9"} err="failed to get container status \"6e775b8b39899f5e16ecf758a41f2271664854b1774c70ffc1a54dd6044f14f9\": rpc error: code = NotFound desc = could not find container \"6e775b8b39899f5e16ecf758a41f2271664854b1774c70ffc1a54dd6044f14f9\": container with ID starting with 6e775b8b39899f5e16ecf758a41f2271664854b1774c70ffc1a54dd6044f14f9 not found: ID does not exist" Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.330633 4792 scope.go:117] "RemoveContainer" containerID="df146e3f72276a8f5068fc7009e24cbe1bbecb109e16ccd31091bbc81f3f06b6" Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.330940 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df146e3f72276a8f5068fc7009e24cbe1bbecb109e16ccd31091bbc81f3f06b6"} err="failed to get container status \"df146e3f72276a8f5068fc7009e24cbe1bbecb109e16ccd31091bbc81f3f06b6\": rpc error: code = NotFound desc = could not find container \"df146e3f72276a8f5068fc7009e24cbe1bbecb109e16ccd31091bbc81f3f06b6\": container with ID starting with df146e3f72276a8f5068fc7009e24cbe1bbecb109e16ccd31091bbc81f3f06b6 not found: ID does not exist" Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.604790 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.619907 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.637197 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 02 19:01:17 crc kubenswrapper[4792]: E1202 19:01:17.637710 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0813b309-ff0c-4dc1-9ff0-66737db0dfe0" containerName="nova-api-log" Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.637734 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="0813b309-ff0c-4dc1-9ff0-66737db0dfe0" containerName="nova-api-log" Dec 02 19:01:17 crc kubenswrapper[4792]: E1202 19:01:17.637759 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92187bf7-8ae3-4a53-9e20-f1d31f44f0d0" containerName="dnsmasq-dns" Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.637768 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="92187bf7-8ae3-4a53-9e20-f1d31f44f0d0" containerName="dnsmasq-dns" Dec 02 19:01:17 crc 
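
The RemoveContainer → "ContainerStatus from runtime service failed ... NotFound" → "DeleteContainer returned error" cycle above is benign: the containers were already gone from CRI-O, and the kubelet logs the NotFound and moves on rather than failing the cleanup. The CRI is gRPC (hence the "rpc error: code = NotFound" text), so the idempotent-delete pattern looks roughly like this sketch (the removeContainer stand-in below is hypothetical, not the kubelet's actual code):

    package main

    import (
    	"fmt"

    	"google.golang.org/grpc/codes"
    	"google.golang.org/grpc/status"
    )

    // removeContainer stands in for a CRI RemoveContainer/ContainerStatus call.
    func removeContainer(id string) error {
    	// Simulate the runtime answering as in the log entries above.
    	return status.Errorf(codes.NotFound, "could not find container %q", id)
    }

    func main() {
    	err := removeContainer("6e775b8b39899f5e16ecf758a41f2271664854b1774c70ffc1a54dd6044f14f9")
    	if status.Code(err) == codes.NotFound {
    		// Already gone: treat deletion as idempotent, log, and move on,
    		// which is the behavior the entries above record.
    		fmt.Println("already removed:", err)
    		return
    	}
    	if err != nil {
    		panic(err)
    	}
    }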
Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.637807 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914" containerName="nova-manage"
Dec 02 19:01:17 crc kubenswrapper[4792]: E1202 19:01:17.637823 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0813b309-ff0c-4dc1-9ff0-66737db0dfe0" containerName="nova-api-api"
Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.637830 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="0813b309-ff0c-4dc1-9ff0-66737db0dfe0" containerName="nova-api-api"
Dec 02 19:01:17 crc kubenswrapper[4792]: E1202 19:01:17.637853 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92187bf7-8ae3-4a53-9e20-f1d31f44f0d0" containerName="init"
Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.637861 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="92187bf7-8ae3-4a53-9e20-f1d31f44f0d0" containerName="init"
Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.638084 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="92187bf7-8ae3-4a53-9e20-f1d31f44f0d0" containerName="dnsmasq-dns"
Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.638127 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="0813b309-ff0c-4dc1-9ff0-66737db0dfe0" containerName="nova-api-log"
Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.638145 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914" containerName="nova-manage"
Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.638162 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="0813b309-ff0c-4dc1-9ff0-66737db0dfe0" containerName="nova-api-api"
Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.639693 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.642022 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc"
Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.645538 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc"
Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.646759 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.664420 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.719262 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ffde9a1-e11f-4216-890b-7992f6e1b84c-public-tls-certs\") pod \"nova-api-0\" (UID: \"6ffde9a1-e11f-4216-890b-7992f6e1b84c\") " pod="openstack/nova-api-0"
Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.719399 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nq6zb\" (UniqueName: \"kubernetes.io/projected/6ffde9a1-e11f-4216-890b-7992f6e1b84c-kube-api-access-nq6zb\") pod \"nova-api-0\" (UID: \"6ffde9a1-e11f-4216-890b-7992f6e1b84c\") " pod="openstack/nova-api-0"
Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.719433 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ffde9a1-e11f-4216-890b-7992f6e1b84c-internal-tls-certs\") pod \"nova-api-0\" (UID: \"6ffde9a1-e11f-4216-890b-7992f6e1b84c\") " pod="openstack/nova-api-0"
Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.719499 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ffde9a1-e11f-4216-890b-7992f6e1b84c-config-data\") pod \"nova-api-0\" (UID: \"6ffde9a1-e11f-4216-890b-7992f6e1b84c\") " pod="openstack/nova-api-0"
Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.719563 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ffde9a1-e11f-4216-890b-7992f6e1b84c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6ffde9a1-e11f-4216-890b-7992f6e1b84c\") " pod="openstack/nova-api-0"
Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.719638 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6ffde9a1-e11f-4216-890b-7992f6e1b84c-logs\") pod \"nova-api-0\" (UID: \"6ffde9a1-e11f-4216-890b-7992f6e1b84c\") " pod="openstack/nova-api-0"
Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.821359 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ffde9a1-e11f-4216-890b-7992f6e1b84c-public-tls-certs\") pod \"nova-api-0\" (UID: \"6ffde9a1-e11f-4216-890b-7992f6e1b84c\") " pod="openstack/nova-api-0"
Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.821422 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nq6zb\" (UniqueName: \"kubernetes.io/projected/6ffde9a1-e11f-4216-890b-7992f6e1b84c-kube-api-access-nq6zb\") pod \"nova-api-0\" (UID: \"6ffde9a1-e11f-4216-890b-7992f6e1b84c\") " pod="openstack/nova-api-0"
Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.821440 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ffde9a1-e11f-4216-890b-7992f6e1b84c-internal-tls-certs\") pod \"nova-api-0\" (UID: \"6ffde9a1-e11f-4216-890b-7992f6e1b84c\") " pod="openstack/nova-api-0"
Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.821470 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ffde9a1-e11f-4216-890b-7992f6e1b84c-config-data\") pod \"nova-api-0\" (UID: \"6ffde9a1-e11f-4216-890b-7992f6e1b84c\") " pod="openstack/nova-api-0"
Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.821498 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ffde9a1-e11f-4216-890b-7992f6e1b84c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6ffde9a1-e11f-4216-890b-7992f6e1b84c\") " pod="openstack/nova-api-0"
Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.821546 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6ffde9a1-e11f-4216-890b-7992f6e1b84c-logs\") pod \"nova-api-0\" (UID: \"6ffde9a1-e11f-4216-890b-7992f6e1b84c\") " pod="openstack/nova-api-0"
Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.822042 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6ffde9a1-e11f-4216-890b-7992f6e1b84c-logs\") pod \"nova-api-0\" (UID: \"6ffde9a1-e11f-4216-890b-7992f6e1b84c\") " pod="openstack/nova-api-0"
Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.826441 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ffde9a1-e11f-4216-890b-7992f6e1b84c-internal-tls-certs\") pod \"nova-api-0\" (UID: \"6ffde9a1-e11f-4216-890b-7992f6e1b84c\") " pod="openstack/nova-api-0"
Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.826650 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ffde9a1-e11f-4216-890b-7992f6e1b84c-public-tls-certs\") pod \"nova-api-0\" (UID: \"6ffde9a1-e11f-4216-890b-7992f6e1b84c\") " pod="openstack/nova-api-0"
Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.826699 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ffde9a1-e11f-4216-890b-7992f6e1b84c-config-data\") pod \"nova-api-0\" (UID: \"6ffde9a1-e11f-4216-890b-7992f6e1b84c\") " pod="openstack/nova-api-0"
Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.827027 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ffde9a1-e11f-4216-890b-7992f6e1b84c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6ffde9a1-e11f-4216-890b-7992f6e1b84c\") " pod="openstack/nova-api-0"
Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.845783 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nq6zb\" (UniqueName: \"kubernetes.io/projected/6ffde9a1-e11f-4216-890b-7992f6e1b84c-kube-api-access-nq6zb\") pod \"nova-api-0\" (UID: \"6ffde9a1-e11f-4216-890b-7992f6e1b84c\") " pod="openstack/nova-api-0"
Dec 02 19:01:17 crc kubenswrapper[4792]: E1202 19:01:17.936768 4792 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="79d0389c50e6449e05d91d34bf7774e493d3817a1010ae7a83c05754f8cccf7a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Dec 02 19:01:17 crc kubenswrapper[4792]: E1202 19:01:17.938718 4792 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="79d0389c50e6449e05d91d34bf7774e493d3817a1010ae7a83c05754f8cccf7a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Dec 02 19:01:17 crc kubenswrapper[4792]: E1202 19:01:17.940243 4792 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="79d0389c50e6449e05d91d34bf7774e493d3817a1010ae7a83c05754f8cccf7a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Dec 02 19:01:17 crc kubenswrapper[4792]: E1202 19:01:17.940308 4792 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="89ec05ac-1237-4cfd-a36e-59bc6933890e" containerName="nova-scheduler-scheduler"
Dec 02 19:01:17 crc kubenswrapper[4792]: I1202 19:01:17.996016 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 02 19:01:18 crc kubenswrapper[4792]: W1202 19:01:18.524599 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6ffde9a1_e11f_4216_890b_7992f6e1b84c.slice/crio-a3935b66e01ebacad07ac8a8dcd21537c0a8c459959b343e55305654b6085f41 WatchSource:0}: Error finding container a3935b66e01ebacad07ac8a8dcd21537c0a8c459959b343e55305654b6085f41: Status 404 returned error can't find the container with id a3935b66e01ebacad07ac8a8dcd21537c0a8c459959b343e55305654b6085f41
Dec 02 19:01:18 crc kubenswrapper[4792]: I1202 19:01:18.528766 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Dec 02 19:01:19 crc kubenswrapper[4792]: I1202 19:01:19.297303 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6ffde9a1-e11f-4216-890b-7992f6e1b84c","Type":"ContainerStarted","Data":"0538058282e4ee58134f1ff1145b46a15a9232656a4d3515dc141f1e4d49bd6e"}
Dec 02 19:01:19 crc kubenswrapper[4792]: I1202 19:01:19.297988 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6ffde9a1-e11f-4216-890b-7992f6e1b84c","Type":"ContainerStarted","Data":"2bd31df46512805cc05fb14df16666c03234ffb505d6b4f99f4fdf95a180e8ee"}
Dec 02 19:01:19 crc kubenswrapper[4792]: I1202 19:01:19.298004 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6ffde9a1-e11f-4216-890b-7992f6e1b84c","Type":"ContainerStarted","Data":"a3935b66e01ebacad07ac8a8dcd21537c0a8c459959b343e55305654b6085f41"}
Dec 02 19:01:19 crc kubenswrapper[4792]: I1202 19:01:19.327389 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.327368631 podStartE2EDuration="2.327368631s" podCreationTimestamp="2025-12-02 19:01:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 19:01:19.315974724 +0000 UTC m=+1510.088867072" watchObservedRunningTime="2025-12-02 19:01:19.327368631 +0000 UTC m=+1510.100260959"
Dec 02 19:01:19 crc kubenswrapper[4792]: I1202 19:01:19.555452 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0813b309-ff0c-4dc1-9ff0-66737db0dfe0" path="/var/lib/kubelet/pods/0813b309-ff0c-4dc1-9ff0-66737db0dfe0/volumes"
Dec 02 19:01:19 crc kubenswrapper[4792]: I1202 19:01:19.657434 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="c128c153-5ac8-4dd5-9952-4ad8960e6525" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.217:8775/\": read tcp 10.217.0.2:52964->10.217.0.217:8775: read: connection reset by peer"
Dec 02 19:01:19 crc kubenswrapper[4792]: I1202 19:01:19.657473 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="c128c153-5ac8-4dd5-9952-4ad8960e6525" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.217:8775/\": read tcp 10.217.0.2:52970->10.217.0.217:8775: read: connection reset by peer"
Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.271912 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.327583 4792 generic.go:334] "Generic (PLEG): container finished" podID="c128c153-5ac8-4dd5-9952-4ad8960e6525" containerID="dbbdaad6e6a279a4b33b36c96d3239a799c7b80079a7f2bda17236e0cf483e37" exitCode=0
Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.328870 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.329279 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c128c153-5ac8-4dd5-9952-4ad8960e6525","Type":"ContainerDied","Data":"dbbdaad6e6a279a4b33b36c96d3239a799c7b80079a7f2bda17236e0cf483e37"}
Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.329301 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c128c153-5ac8-4dd5-9952-4ad8960e6525","Type":"ContainerDied","Data":"4c54e402de832e5557ce045c6cd5f6f74fb0ed73086eb6704790ddd1d9baecff"}
Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.329317 4792 scope.go:117] "RemoveContainer" containerID="dbbdaad6e6a279a4b33b36c96d3239a799c7b80079a7f2bda17236e0cf483e37"
Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.374425 4792 scope.go:117] "RemoveContainer" containerID="0297cbe3a5372af31e88d534131030dd719530db04994c19293b49c1290996b2"
Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.386180 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c128c153-5ac8-4dd5-9952-4ad8960e6525-combined-ca-bundle\") pod \"c128c153-5ac8-4dd5-9952-4ad8960e6525\" (UID: \"c128c153-5ac8-4dd5-9952-4ad8960e6525\") "
Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.386337 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c128c153-5ac8-4dd5-9952-4ad8960e6525-nova-metadata-tls-certs\") pod \"c128c153-5ac8-4dd5-9952-4ad8960e6525\" (UID: \"c128c153-5ac8-4dd5-9952-4ad8960e6525\") "
Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.386395 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c128c153-5ac8-4dd5-9952-4ad8960e6525-config-data\") pod \"c128c153-5ac8-4dd5-9952-4ad8960e6525\" (UID: \"c128c153-5ac8-4dd5-9952-4ad8960e6525\") "
Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.386466 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htqct\" (UniqueName: \"kubernetes.io/projected/c128c153-5ac8-4dd5-9952-4ad8960e6525-kube-api-access-htqct\") pod \"c128c153-5ac8-4dd5-9952-4ad8960e6525\" (UID: \"c128c153-5ac8-4dd5-9952-4ad8960e6525\") "
Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.386510 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c128c153-5ac8-4dd5-9952-4ad8960e6525-logs\") pod \"c128c153-5ac8-4dd5-9952-4ad8960e6525\" (UID: \"c128c153-5ac8-4dd5-9952-4ad8960e6525\") "
Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.393042 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c128c153-5ac8-4dd5-9952-4ad8960e6525-logs" (OuterVolumeSpecName: "logs") pod "c128c153-5ac8-4dd5-9952-4ad8960e6525" (UID: "c128c153-5ac8-4dd5-9952-4ad8960e6525"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.412970 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c128c153-5ac8-4dd5-9952-4ad8960e6525-kube-api-access-htqct" (OuterVolumeSpecName: "kube-api-access-htqct") pod "c128c153-5ac8-4dd5-9952-4ad8960e6525" (UID: "c128c153-5ac8-4dd5-9952-4ad8960e6525"). InnerVolumeSpecName "kube-api-access-htqct". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.424313 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c128c153-5ac8-4dd5-9952-4ad8960e6525-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c128c153-5ac8-4dd5-9952-4ad8960e6525" (UID: "c128c153-5ac8-4dd5-9952-4ad8960e6525"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.432964 4792 scope.go:117] "RemoveContainer" containerID="dbbdaad6e6a279a4b33b36c96d3239a799c7b80079a7f2bda17236e0cf483e37"
Dec 02 19:01:20 crc kubenswrapper[4792]: E1202 19:01:20.447756 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dbbdaad6e6a279a4b33b36c96d3239a799c7b80079a7f2bda17236e0cf483e37\": container with ID starting with dbbdaad6e6a279a4b33b36c96d3239a799c7b80079a7f2bda17236e0cf483e37 not found: ID does not exist" containerID="dbbdaad6e6a279a4b33b36c96d3239a799c7b80079a7f2bda17236e0cf483e37"
Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.447792 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dbbdaad6e6a279a4b33b36c96d3239a799c7b80079a7f2bda17236e0cf483e37"} err="failed to get container status \"dbbdaad6e6a279a4b33b36c96d3239a799c7b80079a7f2bda17236e0cf483e37\": rpc error: code = NotFound desc = could not find container \"dbbdaad6e6a279a4b33b36c96d3239a799c7b80079a7f2bda17236e0cf483e37\": container with ID starting with dbbdaad6e6a279a4b33b36c96d3239a799c7b80079a7f2bda17236e0cf483e37 not found: ID does not exist"
Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.447834 4792 scope.go:117] "RemoveContainer" containerID="0297cbe3a5372af31e88d534131030dd719530db04994c19293b49c1290996b2"
Dec 02 19:01:20 crc kubenswrapper[4792]: E1202 19:01:20.449313 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0297cbe3a5372af31e88d534131030dd719530db04994c19293b49c1290996b2\": container with ID starting with 0297cbe3a5372af31e88d534131030dd719530db04994c19293b49c1290996b2 not found: ID does not exist" containerID="0297cbe3a5372af31e88d534131030dd719530db04994c19293b49c1290996b2"
Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.449392 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0297cbe3a5372af31e88d534131030dd719530db04994c19293b49c1290996b2"} err="failed to get container status \"0297cbe3a5372af31e88d534131030dd719530db04994c19293b49c1290996b2\": rpc error: code = NotFound desc = could not find container \"0297cbe3a5372af31e88d534131030dd719530db04994c19293b49c1290996b2\": container with ID starting with 0297cbe3a5372af31e88d534131030dd719530db04994c19293b49c1290996b2 not found: ID does not exist"
Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.488352 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c128c153-5ac8-4dd5-9952-4ad8960e6525-config-data" (OuterVolumeSpecName: "config-data") pod "c128c153-5ac8-4dd5-9952-4ad8960e6525" (UID: "c128c153-5ac8-4dd5-9952-4ad8960e6525"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.489587 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c128c153-5ac8-4dd5-9952-4ad8960e6525-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.489616 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c128c153-5ac8-4dd5-9952-4ad8960e6525-config-data\") on node \"crc\" DevicePath \"\""
Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.489629 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htqct\" (UniqueName: \"kubernetes.io/projected/c128c153-5ac8-4dd5-9952-4ad8960e6525-kube-api-access-htqct\") on node \"crc\" DevicePath \"\""
Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.489643 4792 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c128c153-5ac8-4dd5-9952-4ad8960e6525-logs\") on node \"crc\" DevicePath \"\""
Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.504698 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c128c153-5ac8-4dd5-9952-4ad8960e6525-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "c128c153-5ac8-4dd5-9952-4ad8960e6525" (UID: "c128c153-5ac8-4dd5-9952-4ad8960e6525"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.591228 4792 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c128c153-5ac8-4dd5-9952-4ad8960e6525-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.688698 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.700849 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.708715 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 02 19:01:20 crc kubenswrapper[4792]: E1202 19:01:20.709180 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c128c153-5ac8-4dd5-9952-4ad8960e6525" containerName="nova-metadata-metadata" Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.709205 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="c128c153-5ac8-4dd5-9952-4ad8960e6525" containerName="nova-metadata-metadata" Dec 02 19:01:20 crc kubenswrapper[4792]: E1202 19:01:20.709232 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c128c153-5ac8-4dd5-9952-4ad8960e6525" containerName="nova-metadata-log" Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.709238 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="c128c153-5ac8-4dd5-9952-4ad8960e6525" containerName="nova-metadata-log" Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.709486 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="c128c153-5ac8-4dd5-9952-4ad8960e6525" containerName="nova-metadata-log" Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.709510 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="c128c153-5ac8-4dd5-9952-4ad8960e6525" containerName="nova-metadata-metadata" Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.710696 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.712992 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.713144 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.717433 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.795846 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0928111-92a0-4459-896b-507add4ebc25-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f0928111-92a0-4459-896b-507add4ebc25\") " pod="openstack/nova-metadata-0" Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.795922 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f0928111-92a0-4459-896b-507add4ebc25-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"f0928111-92a0-4459-896b-507add4ebc25\") " pod="openstack/nova-metadata-0" Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.795965 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6jjzk\" (UniqueName: \"kubernetes.io/projected/f0928111-92a0-4459-896b-507add4ebc25-kube-api-access-6jjzk\") pod \"nova-metadata-0\" (UID: \"f0928111-92a0-4459-896b-507add4ebc25\") " pod="openstack/nova-metadata-0" Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.796017 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0928111-92a0-4459-896b-507add4ebc25-config-data\") pod \"nova-metadata-0\" (UID: \"f0928111-92a0-4459-896b-507add4ebc25\") " pod="openstack/nova-metadata-0" Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.796054 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f0928111-92a0-4459-896b-507add4ebc25-logs\") pod \"nova-metadata-0\" (UID: \"f0928111-92a0-4459-896b-507add4ebc25\") " pod="openstack/nova-metadata-0" Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.898006 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0928111-92a0-4459-896b-507add4ebc25-config-data\") pod \"nova-metadata-0\" (UID: \"f0928111-92a0-4459-896b-507add4ebc25\") " pod="openstack/nova-metadata-0" Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.898079 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f0928111-92a0-4459-896b-507add4ebc25-logs\") pod \"nova-metadata-0\" (UID: \"f0928111-92a0-4459-896b-507add4ebc25\") " pod="openstack/nova-metadata-0" Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.898157 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0928111-92a0-4459-896b-507add4ebc25-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f0928111-92a0-4459-896b-507add4ebc25\") " pod="openstack/nova-metadata-0" Dec 02 19:01:20 crc 
kubenswrapper[4792]: I1202 19:01:20.898221 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f0928111-92a0-4459-896b-507add4ebc25-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"f0928111-92a0-4459-896b-507add4ebc25\") " pod="openstack/nova-metadata-0" Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.898264 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6jjzk\" (UniqueName: \"kubernetes.io/projected/f0928111-92a0-4459-896b-507add4ebc25-kube-api-access-6jjzk\") pod \"nova-metadata-0\" (UID: \"f0928111-92a0-4459-896b-507add4ebc25\") " pod="openstack/nova-metadata-0" Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.898901 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f0928111-92a0-4459-896b-507add4ebc25-logs\") pod \"nova-metadata-0\" (UID: \"f0928111-92a0-4459-896b-507add4ebc25\") " pod="openstack/nova-metadata-0" Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.903148 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0928111-92a0-4459-896b-507add4ebc25-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f0928111-92a0-4459-896b-507add4ebc25\") " pod="openstack/nova-metadata-0" Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.904471 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0928111-92a0-4459-896b-507add4ebc25-config-data\") pod \"nova-metadata-0\" (UID: \"f0928111-92a0-4459-896b-507add4ebc25\") " pod="openstack/nova-metadata-0" Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.906024 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f0928111-92a0-4459-896b-507add4ebc25-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"f0928111-92a0-4459-896b-507add4ebc25\") " pod="openstack/nova-metadata-0" Dec 02 19:01:20 crc kubenswrapper[4792]: I1202 19:01:20.917303 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6jjzk\" (UniqueName: \"kubernetes.io/projected/f0928111-92a0-4459-896b-507add4ebc25-kube-api-access-6jjzk\") pod \"nova-metadata-0\" (UID: \"f0928111-92a0-4459-896b-507add4ebc25\") " pod="openstack/nova-metadata-0" Dec 02 19:01:21 crc kubenswrapper[4792]: I1202 19:01:21.075378 4792 util.go:30] "No sandbox for pod can be found. 
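
The VerifyControllerAttachedVolume/MountVolume entries above enumerate the replacement nova-metadata-0 pod's five volumes as the kubelet sees them. The same set is visible from the API side; a minimal client-go sketch, assuming a reachable cluster and a default kubeconfig (this is an illustrative query, not part of the capture):

    package main

    import (
    	"context"
    	"fmt"

    	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    	"k8s.io/client-go/kubernetes"
    	"k8s.io/client-go/tools/clientcmd"
    )

    func main() {
    	// Assumes ~/.kube/config points at the cluster that produced this log.
    	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
    	if err != nil {
    		panic(err)
    	}
    	cs, err := kubernetes.NewForConfig(cfg)
    	if err != nil {
    		panic(err)
    	}
    	pod, err := cs.CoreV1().Pods("openstack").Get(context.TODO(), "nova-metadata-0", metav1.GetOptions{})
    	if err != nil {
    		panic(err)
    	}
    	for _, v := range pod.Spec.Volumes {
    		// Expect combined-ca-bundle, nova-metadata-tls-certs, kube-api-access-*,
    		// config-data, and logs, matching the mount entries above.
    		fmt.Println(v.Name)
    	}
    }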
Dec 02 19:01:21 crc kubenswrapper[4792]: I1202 19:01:21.550796 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c128c153-5ac8-4dd5-9952-4ad8960e6525" path="/var/lib/kubelet/pods/c128c153-5ac8-4dd5-9952-4ad8960e6525/volumes"
Dec 02 19:01:21 crc kubenswrapper[4792]: I1202 19:01:21.594618 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 02 19:01:21 crc kubenswrapper[4792]: W1202 19:01:21.602287 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf0928111_92a0_4459_896b_507add4ebc25.slice/crio-17a0954506ccb9b50b160693d39118c82265dd471a213f1181231d57f1943e1f WatchSource:0}: Error finding container 17a0954506ccb9b50b160693d39118c82265dd471a213f1181231d57f1943e1f: Status 404 returned error can't find the container with id 17a0954506ccb9b50b160693d39118c82265dd471a213f1181231d57f1943e1f
Dec 02 19:01:21 crc kubenswrapper[4792]: I1202 19:01:21.967441 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 02 19:01:22 crc kubenswrapper[4792]: I1202 19:01:22.124192 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89ec05ac-1237-4cfd-a36e-59bc6933890e-combined-ca-bundle\") pod \"89ec05ac-1237-4cfd-a36e-59bc6933890e\" (UID: \"89ec05ac-1237-4cfd-a36e-59bc6933890e\") "
Dec 02 19:01:22 crc kubenswrapper[4792]: I1202 19:01:22.124262 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89ec05ac-1237-4cfd-a36e-59bc6933890e-config-data\") pod \"89ec05ac-1237-4cfd-a36e-59bc6933890e\" (UID: \"89ec05ac-1237-4cfd-a36e-59bc6933890e\") "
Dec 02 19:01:22 crc kubenswrapper[4792]: I1202 19:01:22.124405 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f6q4c\" (UniqueName: \"kubernetes.io/projected/89ec05ac-1237-4cfd-a36e-59bc6933890e-kube-api-access-f6q4c\") pod \"89ec05ac-1237-4cfd-a36e-59bc6933890e\" (UID: \"89ec05ac-1237-4cfd-a36e-59bc6933890e\") "
Dec 02 19:01:22 crc kubenswrapper[4792]: I1202 19:01:22.128460 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89ec05ac-1237-4cfd-a36e-59bc6933890e-kube-api-access-f6q4c" (OuterVolumeSpecName: "kube-api-access-f6q4c") pod "89ec05ac-1237-4cfd-a36e-59bc6933890e" (UID: "89ec05ac-1237-4cfd-a36e-59bc6933890e"). InnerVolumeSpecName "kube-api-access-f6q4c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 19:01:22 crc kubenswrapper[4792]: I1202 19:01:22.159248 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89ec05ac-1237-4cfd-a36e-59bc6933890e-config-data" (OuterVolumeSpecName: "config-data") pod "89ec05ac-1237-4cfd-a36e-59bc6933890e" (UID: "89ec05ac-1237-4cfd-a36e-59bc6933890e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 19:01:22 crc kubenswrapper[4792]: I1202 19:01:22.164679 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89ec05ac-1237-4cfd-a36e-59bc6933890e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "89ec05ac-1237-4cfd-a36e-59bc6933890e" (UID: "89ec05ac-1237-4cfd-a36e-59bc6933890e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 19:01:22 crc kubenswrapper[4792]: I1202 19:01:22.229350 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89ec05ac-1237-4cfd-a36e-59bc6933890e-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 02 19:01:22 crc kubenswrapper[4792]: I1202 19:01:22.229393 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89ec05ac-1237-4cfd-a36e-59bc6933890e-config-data\") on node \"crc\" DevicePath \"\""
Dec 02 19:01:22 crc kubenswrapper[4792]: I1202 19:01:22.229406 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f6q4c\" (UniqueName: \"kubernetes.io/projected/89ec05ac-1237-4cfd-a36e-59bc6933890e-kube-api-access-f6q4c\") on node \"crc\" DevicePath \"\""
Dec 02 19:01:22 crc kubenswrapper[4792]: I1202 19:01:22.372139 4792 generic.go:334] "Generic (PLEG): container finished" podID="89ec05ac-1237-4cfd-a36e-59bc6933890e" containerID="79d0389c50e6449e05d91d34bf7774e493d3817a1010ae7a83c05754f8cccf7a" exitCode=0
Dec 02 19:01:22 crc kubenswrapper[4792]: I1202 19:01:22.372220 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 02 19:01:22 crc kubenswrapper[4792]: I1202 19:01:22.372221 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"89ec05ac-1237-4cfd-a36e-59bc6933890e","Type":"ContainerDied","Data":"79d0389c50e6449e05d91d34bf7774e493d3817a1010ae7a83c05754f8cccf7a"}
Dec 02 19:01:22 crc kubenswrapper[4792]: I1202 19:01:22.372572 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"89ec05ac-1237-4cfd-a36e-59bc6933890e","Type":"ContainerDied","Data":"022f3a17a3a4d4df8d301a55ad40e3a37e5d1364f5d5e1b21bc584f046278069"}
Dec 02 19:01:22 crc kubenswrapper[4792]: I1202 19:01:22.372602 4792 scope.go:117] "RemoveContainer" containerID="79d0389c50e6449e05d91d34bf7774e493d3817a1010ae7a83c05754f8cccf7a"
Dec 02 19:01:22 crc kubenswrapper[4792]: I1202 19:01:22.375108 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f0928111-92a0-4459-896b-507add4ebc25","Type":"ContainerStarted","Data":"641ce511afeec1120bb09f03ae8b4013ecd123c0646ad3966f63a7e3d36e13cb"}
Dec 02 19:01:22 crc kubenswrapper[4792]: I1202 19:01:22.375154 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f0928111-92a0-4459-896b-507add4ebc25","Type":"ContainerStarted","Data":"f71432cc4e9c20f931a80a9b1b5f46f31f951f902cf79068dbfb91fbe971188b"}
Dec 02 19:01:22 crc kubenswrapper[4792]: I1202 19:01:22.375172 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f0928111-92a0-4459-896b-507add4ebc25","Type":"ContainerStarted","Data":"17a0954506ccb9b50b160693d39118c82265dd471a213f1181231d57f1943e1f"}
Dec 02 19:01:22 crc kubenswrapper[4792]: I1202 19:01:22.398011 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.397980987 podStartE2EDuration="2.397980987s" podCreationTimestamp="2025-12-02 19:01:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 19:01:22.394049254 +0000 UTC m=+1513.166941633" watchObservedRunningTime="2025-12-02 19:01:22.397980987 +0000 UTC m=+1513.170873355"
Dec 02 19:01:22 crc kubenswrapper[4792]: I1202 19:01:22.414421 4792 scope.go:117] "RemoveContainer" containerID="79d0389c50e6449e05d91d34bf7774e493d3817a1010ae7a83c05754f8cccf7a"
Dec 02 19:01:22 crc kubenswrapper[4792]: E1202 19:01:22.415064 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"79d0389c50e6449e05d91d34bf7774e493d3817a1010ae7a83c05754f8cccf7a\": container with ID starting with 79d0389c50e6449e05d91d34bf7774e493d3817a1010ae7a83c05754f8cccf7a not found: ID does not exist" containerID="79d0389c50e6449e05d91d34bf7774e493d3817a1010ae7a83c05754f8cccf7a"
Dec 02 19:01:22 crc kubenswrapper[4792]: I1202 19:01:22.415106 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79d0389c50e6449e05d91d34bf7774e493d3817a1010ae7a83c05754f8cccf7a"} err="failed to get container status \"79d0389c50e6449e05d91d34bf7774e493d3817a1010ae7a83c05754f8cccf7a\": rpc error: code = NotFound desc = could not find container \"79d0389c50e6449e05d91d34bf7774e493d3817a1010ae7a83c05754f8cccf7a\": container with ID starting with 79d0389c50e6449e05d91d34bf7774e493d3817a1010ae7a83c05754f8cccf7a not found: ID does not exist"
Dec 02 19:01:22 crc kubenswrapper[4792]: I1202 19:01:22.440743 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 02 19:01:22 crc kubenswrapper[4792]: I1202 19:01:22.448988 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 02 19:01:22 crc kubenswrapper[4792]: I1202 19:01:22.458241 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Dec 02 19:01:22 crc kubenswrapper[4792]: E1202 19:01:22.458751 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89ec05ac-1237-4cfd-a36e-59bc6933890e" containerName="nova-scheduler-scheduler"
Dec 02 19:01:22 crc kubenswrapper[4792]: I1202 19:01:22.458768 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="89ec05ac-1237-4cfd-a36e-59bc6933890e" containerName="nova-scheduler-scheduler"
Dec 02 19:01:22 crc kubenswrapper[4792]: I1202 19:01:22.459015 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="89ec05ac-1237-4cfd-a36e-59bc6933890e" containerName="nova-scheduler-scheduler"
Dec 02 19:01:22 crc kubenswrapper[4792]: I1202 19:01:22.459793 4792 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 02 19:01:22 crc kubenswrapper[4792]: I1202 19:01:22.463476 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 02 19:01:22 crc kubenswrapper[4792]: I1202 19:01:22.470115 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 19:01:22 crc kubenswrapper[4792]: I1202 19:01:22.541278 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15549a23-1a53-41b1-84f5-a7bfda08faed-config-data\") pod \"nova-scheduler-0\" (UID: \"15549a23-1a53-41b1-84f5-a7bfda08faed\") " pod="openstack/nova-scheduler-0" Dec 02 19:01:22 crc kubenswrapper[4792]: I1202 19:01:22.541991 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rph5p\" (UniqueName: \"kubernetes.io/projected/15549a23-1a53-41b1-84f5-a7bfda08faed-kube-api-access-rph5p\") pod \"nova-scheduler-0\" (UID: \"15549a23-1a53-41b1-84f5-a7bfda08faed\") " pod="openstack/nova-scheduler-0" Dec 02 19:01:22 crc kubenswrapper[4792]: I1202 19:01:22.542224 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15549a23-1a53-41b1-84f5-a7bfda08faed-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"15549a23-1a53-41b1-84f5-a7bfda08faed\") " pod="openstack/nova-scheduler-0" Dec 02 19:01:22 crc kubenswrapper[4792]: I1202 19:01:22.648994 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rph5p\" (UniqueName: \"kubernetes.io/projected/15549a23-1a53-41b1-84f5-a7bfda08faed-kube-api-access-rph5p\") pod \"nova-scheduler-0\" (UID: \"15549a23-1a53-41b1-84f5-a7bfda08faed\") " pod="openstack/nova-scheduler-0" Dec 02 19:01:22 crc kubenswrapper[4792]: I1202 19:01:22.649508 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15549a23-1a53-41b1-84f5-a7bfda08faed-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"15549a23-1a53-41b1-84f5-a7bfda08faed\") " pod="openstack/nova-scheduler-0" Dec 02 19:01:22 crc kubenswrapper[4792]: I1202 19:01:22.649939 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15549a23-1a53-41b1-84f5-a7bfda08faed-config-data\") pod \"nova-scheduler-0\" (UID: \"15549a23-1a53-41b1-84f5-a7bfda08faed\") " pod="openstack/nova-scheduler-0" Dec 02 19:01:22 crc kubenswrapper[4792]: I1202 19:01:22.654441 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15549a23-1a53-41b1-84f5-a7bfda08faed-config-data\") pod \"nova-scheduler-0\" (UID: \"15549a23-1a53-41b1-84f5-a7bfda08faed\") " pod="openstack/nova-scheduler-0" Dec 02 19:01:22 crc kubenswrapper[4792]: I1202 19:01:22.655112 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15549a23-1a53-41b1-84f5-a7bfda08faed-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"15549a23-1a53-41b1-84f5-a7bfda08faed\") " pod="openstack/nova-scheduler-0" Dec 02 19:01:22 crc kubenswrapper[4792]: I1202 19:01:22.665337 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rph5p\" (UniqueName: 
\"kubernetes.io/projected/15549a23-1a53-41b1-84f5-a7bfda08faed-kube-api-access-rph5p\") pod \"nova-scheduler-0\" (UID: \"15549a23-1a53-41b1-84f5-a7bfda08faed\") " pod="openstack/nova-scheduler-0" Dec 02 19:01:22 crc kubenswrapper[4792]: I1202 19:01:22.805595 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 02 19:01:23 crc kubenswrapper[4792]: W1202 19:01:23.378751 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod15549a23_1a53_41b1_84f5_a7bfda08faed.slice/crio-56ba86fe23a9a8b73b2ee69f63f0bf3c34bc18c178a6af00605ec208a76f4a2c WatchSource:0}: Error finding container 56ba86fe23a9a8b73b2ee69f63f0bf3c34bc18c178a6af00605ec208a76f4a2c: Status 404 returned error can't find the container with id 56ba86fe23a9a8b73b2ee69f63f0bf3c34bc18c178a6af00605ec208a76f4a2c Dec 02 19:01:23 crc kubenswrapper[4792]: I1202 19:01:23.388292 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 19:01:23 crc kubenswrapper[4792]: I1202 19:01:23.552197 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="89ec05ac-1237-4cfd-a36e-59bc6933890e" path="/var/lib/kubelet/pods/89ec05ac-1237-4cfd-a36e-59bc6933890e/volumes" Dec 02 19:01:24 crc kubenswrapper[4792]: I1202 19:01:24.409414 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"15549a23-1a53-41b1-84f5-a7bfda08faed","Type":"ContainerStarted","Data":"a69098192badf81a04bdfe1893ae49ece15b0de3a2f0b9e32fbe6dfc040a4df6"} Dec 02 19:01:24 crc kubenswrapper[4792]: I1202 19:01:24.409798 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"15549a23-1a53-41b1-84f5-a7bfda08faed","Type":"ContainerStarted","Data":"56ba86fe23a9a8b73b2ee69f63f0bf3c34bc18c178a6af00605ec208a76f4a2c"} Dec 02 19:01:24 crc kubenswrapper[4792]: I1202 19:01:24.442997 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.442969181 podStartE2EDuration="2.442969181s" podCreationTimestamp="2025-12-02 19:01:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 19:01:24.435562428 +0000 UTC m=+1515.208454766" watchObservedRunningTime="2025-12-02 19:01:24.442969181 +0000 UTC m=+1515.215861539" Dec 02 19:01:26 crc kubenswrapper[4792]: I1202 19:01:26.076430 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 02 19:01:26 crc kubenswrapper[4792]: I1202 19:01:26.076782 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 02 19:01:27 crc kubenswrapper[4792]: I1202 19:01:27.805916 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 02 19:01:27 crc kubenswrapper[4792]: I1202 19:01:27.997181 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 02 19:01:27 crc kubenswrapper[4792]: I1202 19:01:27.997938 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 02 19:01:29 crc kubenswrapper[4792]: I1202 19:01:29.013775 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="6ffde9a1-e11f-4216-890b-7992f6e1b84c" containerName="nova-api-log" probeResult="failure" output="Get 
\"https://10.217.0.229:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 02 19:01:29 crc kubenswrapper[4792]: I1202 19:01:29.013795 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="6ffde9a1-e11f-4216-890b-7992f6e1b84c" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.229:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 02 19:01:31 crc kubenswrapper[4792]: I1202 19:01:31.076933 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 02 19:01:31 crc kubenswrapper[4792]: I1202 19:01:31.077413 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 02 19:01:32 crc kubenswrapper[4792]: I1202 19:01:32.093731 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="f0928111-92a0-4459-896b-507add4ebc25" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.230:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 02 19:01:32 crc kubenswrapper[4792]: I1202 19:01:32.093754 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="f0928111-92a0-4459-896b-507add4ebc25" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.230:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 02 19:01:32 crc kubenswrapper[4792]: I1202 19:01:32.806424 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 02 19:01:32 crc kubenswrapper[4792]: I1202 19:01:32.853114 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 02 19:01:33 crc kubenswrapper[4792]: I1202 19:01:33.582739 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 02 19:01:36 crc kubenswrapper[4792]: I1202 19:01:36.455174 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 02 19:01:38 crc kubenswrapper[4792]: I1202 19:01:38.005437 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 02 19:01:38 crc kubenswrapper[4792]: I1202 19:01:38.005971 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 02 19:01:38 crc kubenswrapper[4792]: I1202 19:01:38.006427 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 02 19:01:38 crc kubenswrapper[4792]: I1202 19:01:38.006476 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 02 19:01:38 crc kubenswrapper[4792]: I1202 19:01:38.015115 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 02 19:01:38 crc kubenswrapper[4792]: I1202 19:01:38.017270 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 02 19:01:38 crc kubenswrapper[4792]: I1202 19:01:38.082892 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" 
start-of-body= Dec 02 19:01:38 crc kubenswrapper[4792]: I1202 19:01:38.082966 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 19:01:41 crc kubenswrapper[4792]: I1202 19:01:41.084571 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 02 19:01:41 crc kubenswrapper[4792]: I1202 19:01:41.090112 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 02 19:01:41 crc kubenswrapper[4792]: I1202 19:01:41.097264 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 02 19:01:41 crc kubenswrapper[4792]: I1202 19:01:41.645672 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 02 19:01:51 crc kubenswrapper[4792]: I1202 19:01:51.901906 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-db-sync-wh2hr"] Dec 02 19:01:51 crc kubenswrapper[4792]: I1202 19:01:51.913210 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-db-sync-wh2hr"] Dec 02 19:01:51 crc kubenswrapper[4792]: I1202 19:01:51.960019 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-db-sync-vj9gg"] Dec 02 19:01:51 crc kubenswrapper[4792]: I1202 19:01:51.961279 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-sync-vj9gg" Dec 02 19:01:51 crc kubenswrapper[4792]: I1202 19:01:51.966207 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 02 19:01:51 crc kubenswrapper[4792]: I1202 19:01:51.977638 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-sync-vj9gg"] Dec 02 19:01:52 crc kubenswrapper[4792]: I1202 19:01:52.039967 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2v9kw\" (UniqueName: \"kubernetes.io/projected/809cb60d-1a06-4216-8f74-882aa0d7470e-kube-api-access-2v9kw\") pod \"cloudkitty-db-sync-vj9gg\" (UID: \"809cb60d-1a06-4216-8f74-882aa0d7470e\") " pod="openstack/cloudkitty-db-sync-vj9gg" Dec 02 19:01:52 crc kubenswrapper[4792]: I1202 19:01:52.040066 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/809cb60d-1a06-4216-8f74-882aa0d7470e-scripts\") pod \"cloudkitty-db-sync-vj9gg\" (UID: \"809cb60d-1a06-4216-8f74-882aa0d7470e\") " pod="openstack/cloudkitty-db-sync-vj9gg" Dec 02 19:01:52 crc kubenswrapper[4792]: I1202 19:01:52.040386 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/809cb60d-1a06-4216-8f74-882aa0d7470e-certs\") pod \"cloudkitty-db-sync-vj9gg\" (UID: \"809cb60d-1a06-4216-8f74-882aa0d7470e\") " pod="openstack/cloudkitty-db-sync-vj9gg" Dec 02 19:01:52 crc kubenswrapper[4792]: I1202 19:01:52.040542 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/809cb60d-1a06-4216-8f74-882aa0d7470e-combined-ca-bundle\") pod \"cloudkitty-db-sync-vj9gg\" (UID: 
\"809cb60d-1a06-4216-8f74-882aa0d7470e\") " pod="openstack/cloudkitty-db-sync-vj9gg" Dec 02 19:01:52 crc kubenswrapper[4792]: I1202 19:01:52.040593 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/809cb60d-1a06-4216-8f74-882aa0d7470e-config-data\") pod \"cloudkitty-db-sync-vj9gg\" (UID: \"809cb60d-1a06-4216-8f74-882aa0d7470e\") " pod="openstack/cloudkitty-db-sync-vj9gg" Dec 02 19:01:52 crc kubenswrapper[4792]: I1202 19:01:52.142910 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2v9kw\" (UniqueName: \"kubernetes.io/projected/809cb60d-1a06-4216-8f74-882aa0d7470e-kube-api-access-2v9kw\") pod \"cloudkitty-db-sync-vj9gg\" (UID: \"809cb60d-1a06-4216-8f74-882aa0d7470e\") " pod="openstack/cloudkitty-db-sync-vj9gg" Dec 02 19:01:52 crc kubenswrapper[4792]: I1202 19:01:52.143002 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/809cb60d-1a06-4216-8f74-882aa0d7470e-scripts\") pod \"cloudkitty-db-sync-vj9gg\" (UID: \"809cb60d-1a06-4216-8f74-882aa0d7470e\") " pod="openstack/cloudkitty-db-sync-vj9gg" Dec 02 19:01:52 crc kubenswrapper[4792]: I1202 19:01:52.143091 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/809cb60d-1a06-4216-8f74-882aa0d7470e-certs\") pod \"cloudkitty-db-sync-vj9gg\" (UID: \"809cb60d-1a06-4216-8f74-882aa0d7470e\") " pod="openstack/cloudkitty-db-sync-vj9gg" Dec 02 19:01:52 crc kubenswrapper[4792]: I1202 19:01:52.143127 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/809cb60d-1a06-4216-8f74-882aa0d7470e-combined-ca-bundle\") pod \"cloudkitty-db-sync-vj9gg\" (UID: \"809cb60d-1a06-4216-8f74-882aa0d7470e\") " pod="openstack/cloudkitty-db-sync-vj9gg" Dec 02 19:01:52 crc kubenswrapper[4792]: I1202 19:01:52.143145 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/809cb60d-1a06-4216-8f74-882aa0d7470e-config-data\") pod \"cloudkitty-db-sync-vj9gg\" (UID: \"809cb60d-1a06-4216-8f74-882aa0d7470e\") " pod="openstack/cloudkitty-db-sync-vj9gg" Dec 02 19:01:52 crc kubenswrapper[4792]: I1202 19:01:52.151288 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/809cb60d-1a06-4216-8f74-882aa0d7470e-certs\") pod \"cloudkitty-db-sync-vj9gg\" (UID: \"809cb60d-1a06-4216-8f74-882aa0d7470e\") " pod="openstack/cloudkitty-db-sync-vj9gg" Dec 02 19:01:52 crc kubenswrapper[4792]: I1202 19:01:52.151647 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/809cb60d-1a06-4216-8f74-882aa0d7470e-config-data\") pod \"cloudkitty-db-sync-vj9gg\" (UID: \"809cb60d-1a06-4216-8f74-882aa0d7470e\") " pod="openstack/cloudkitty-db-sync-vj9gg" Dec 02 19:01:52 crc kubenswrapper[4792]: I1202 19:01:52.151655 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/809cb60d-1a06-4216-8f74-882aa0d7470e-scripts\") pod \"cloudkitty-db-sync-vj9gg\" (UID: \"809cb60d-1a06-4216-8f74-882aa0d7470e\") " pod="openstack/cloudkitty-db-sync-vj9gg" Dec 02 19:01:52 crc kubenswrapper[4792]: I1202 19:01:52.153081 4792 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/809cb60d-1a06-4216-8f74-882aa0d7470e-combined-ca-bundle\") pod \"cloudkitty-db-sync-vj9gg\" (UID: \"809cb60d-1a06-4216-8f74-882aa0d7470e\") " pod="openstack/cloudkitty-db-sync-vj9gg" Dec 02 19:01:52 crc kubenswrapper[4792]: I1202 19:01:52.161227 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2v9kw\" (UniqueName: \"kubernetes.io/projected/809cb60d-1a06-4216-8f74-882aa0d7470e-kube-api-access-2v9kw\") pod \"cloudkitty-db-sync-vj9gg\" (UID: \"809cb60d-1a06-4216-8f74-882aa0d7470e\") " pod="openstack/cloudkitty-db-sync-vj9gg" Dec 02 19:01:52 crc kubenswrapper[4792]: I1202 19:01:52.278442 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-sync-vj9gg" Dec 02 19:01:52 crc kubenswrapper[4792]: I1202 19:01:52.791339 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-sync-vj9gg"] Dec 02 19:01:53 crc kubenswrapper[4792]: I1202 19:01:53.555183 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3df2077b-8a01-47ae-ad22-abfc02071c24" path="/var/lib/kubelet/pods/3df2077b-8a01-47ae-ad22-abfc02071c24/volumes" Dec 02 19:01:53 crc kubenswrapper[4792]: I1202 19:01:53.809169 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 19:01:53 crc kubenswrapper[4792]: I1202 19:01:53.810868 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="592ba145-17cf-4929-ad52-a324bd87fe0a" containerName="sg-core" containerID="cri-o://22d608f98046c7321e11f65e21e6a3cd1b95dbcab69bf214abafd6aacf9991a0" gracePeriod=30 Dec 02 19:01:53 crc kubenswrapper[4792]: I1202 19:01:53.810968 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="592ba145-17cf-4929-ad52-a324bd87fe0a" containerName="proxy-httpd" containerID="cri-o://afdb78c76a85146d971da39dd995b42e691b0b459700c2087581bd1b0dd91538" gracePeriod=30 Dec 02 19:01:53 crc kubenswrapper[4792]: I1202 19:01:53.811022 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="592ba145-17cf-4929-ad52-a324bd87fe0a" containerName="ceilometer-notification-agent" containerID="cri-o://903682506d1219b834fcdcbfc534d747e1fb47d76b7d588807598497b8dde7bb" gracePeriod=30 Dec 02 19:01:53 crc kubenswrapper[4792]: I1202 19:01:53.810502 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="592ba145-17cf-4929-ad52-a324bd87fe0a" containerName="ceilometer-central-agent" containerID="cri-o://70363146d7872bb5e3f6ff7639671d03e363a4f84a59b58b8b0e742324e40560" gracePeriod=30 Dec 02 19:01:53 crc kubenswrapper[4792]: I1202 19:01:53.832814 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-vj9gg" event={"ID":"809cb60d-1a06-4216-8f74-882aa0d7470e","Type":"ContainerStarted","Data":"3390ce5a448f9f52e9e772accdd2e630587244614dccd4739f3e3dd13532f73c"} Dec 02 19:01:54 crc kubenswrapper[4792]: I1202 19:01:54.055982 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 02 19:01:54 crc kubenswrapper[4792]: I1202 19:01:54.816471 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 02 19:01:54 crc kubenswrapper[4792]: I1202 19:01:54.848758 4792 generic.go:334] "Generic (PLEG): container finished" 
podID="592ba145-17cf-4929-ad52-a324bd87fe0a" containerID="afdb78c76a85146d971da39dd995b42e691b0b459700c2087581bd1b0dd91538" exitCode=0 Dec 02 19:01:54 crc kubenswrapper[4792]: I1202 19:01:54.848814 4792 generic.go:334] "Generic (PLEG): container finished" podID="592ba145-17cf-4929-ad52-a324bd87fe0a" containerID="22d608f98046c7321e11f65e21e6a3cd1b95dbcab69bf214abafd6aacf9991a0" exitCode=2 Dec 02 19:01:54 crc kubenswrapper[4792]: I1202 19:01:54.848824 4792 generic.go:334] "Generic (PLEG): container finished" podID="592ba145-17cf-4929-ad52-a324bd87fe0a" containerID="70363146d7872bb5e3f6ff7639671d03e363a4f84a59b58b8b0e742324e40560" exitCode=0 Dec 02 19:01:54 crc kubenswrapper[4792]: I1202 19:01:54.848829 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"592ba145-17cf-4929-ad52-a324bd87fe0a","Type":"ContainerDied","Data":"afdb78c76a85146d971da39dd995b42e691b0b459700c2087581bd1b0dd91538"} Dec 02 19:01:54 crc kubenswrapper[4792]: I1202 19:01:54.848882 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"592ba145-17cf-4929-ad52-a324bd87fe0a","Type":"ContainerDied","Data":"22d608f98046c7321e11f65e21e6a3cd1b95dbcab69bf214abafd6aacf9991a0"} Dec 02 19:01:54 crc kubenswrapper[4792]: I1202 19:01:54.848893 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"592ba145-17cf-4929-ad52-a324bd87fe0a","Type":"ContainerDied","Data":"70363146d7872bb5e3f6ff7639671d03e363a4f84a59b58b8b0e742324e40560"} Dec 02 19:01:56 crc kubenswrapper[4792]: I1202 19:01:56.885034 4792 generic.go:334] "Generic (PLEG): container finished" podID="592ba145-17cf-4929-ad52-a324bd87fe0a" containerID="903682506d1219b834fcdcbfc534d747e1fb47d76b7d588807598497b8dde7bb" exitCode=0 Dec 02 19:01:56 crc kubenswrapper[4792]: I1202 19:01:56.885389 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"592ba145-17cf-4929-ad52-a324bd87fe0a","Type":"ContainerDied","Data":"903682506d1219b834fcdcbfc534d747e1fb47d76b7d588807598497b8dde7bb"} Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.206006 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.348687 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/592ba145-17cf-4929-ad52-a324bd87fe0a-scripts\") pod \"592ba145-17cf-4929-ad52-a324bd87fe0a\" (UID: \"592ba145-17cf-4929-ad52-a324bd87fe0a\") " Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.348753 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/592ba145-17cf-4929-ad52-a324bd87fe0a-log-httpd\") pod \"592ba145-17cf-4929-ad52-a324bd87fe0a\" (UID: \"592ba145-17cf-4929-ad52-a324bd87fe0a\") " Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.348860 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/592ba145-17cf-4929-ad52-a324bd87fe0a-combined-ca-bundle\") pod \"592ba145-17cf-4929-ad52-a324bd87fe0a\" (UID: \"592ba145-17cf-4929-ad52-a324bd87fe0a\") " Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.348897 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/592ba145-17cf-4929-ad52-a324bd87fe0a-run-httpd\") pod \"592ba145-17cf-4929-ad52-a324bd87fe0a\" (UID: \"592ba145-17cf-4929-ad52-a324bd87fe0a\") " Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.348958 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c2k5v\" (UniqueName: \"kubernetes.io/projected/592ba145-17cf-4929-ad52-a324bd87fe0a-kube-api-access-c2k5v\") pod \"592ba145-17cf-4929-ad52-a324bd87fe0a\" (UID: \"592ba145-17cf-4929-ad52-a324bd87fe0a\") " Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.348984 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/592ba145-17cf-4929-ad52-a324bd87fe0a-sg-core-conf-yaml\") pod \"592ba145-17cf-4929-ad52-a324bd87fe0a\" (UID: \"592ba145-17cf-4929-ad52-a324bd87fe0a\") " Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.349084 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/592ba145-17cf-4929-ad52-a324bd87fe0a-ceilometer-tls-certs\") pod \"592ba145-17cf-4929-ad52-a324bd87fe0a\" (UID: \"592ba145-17cf-4929-ad52-a324bd87fe0a\") " Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.349116 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/592ba145-17cf-4929-ad52-a324bd87fe0a-config-data\") pod \"592ba145-17cf-4929-ad52-a324bd87fe0a\" (UID: \"592ba145-17cf-4929-ad52-a324bd87fe0a\") " Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.349290 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/592ba145-17cf-4929-ad52-a324bd87fe0a-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "592ba145-17cf-4929-ad52-a324bd87fe0a" (UID: "592ba145-17cf-4929-ad52-a324bd87fe0a"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.349573 4792 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/592ba145-17cf-4929-ad52-a324bd87fe0a-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.349648 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/592ba145-17cf-4929-ad52-a324bd87fe0a-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "592ba145-17cf-4929-ad52-a324bd87fe0a" (UID: "592ba145-17cf-4929-ad52-a324bd87fe0a"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.355704 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/592ba145-17cf-4929-ad52-a324bd87fe0a-scripts" (OuterVolumeSpecName: "scripts") pod "592ba145-17cf-4929-ad52-a324bd87fe0a" (UID: "592ba145-17cf-4929-ad52-a324bd87fe0a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.369677 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/592ba145-17cf-4929-ad52-a324bd87fe0a-kube-api-access-c2k5v" (OuterVolumeSpecName: "kube-api-access-c2k5v") pod "592ba145-17cf-4929-ad52-a324bd87fe0a" (UID: "592ba145-17cf-4929-ad52-a324bd87fe0a"). InnerVolumeSpecName "kube-api-access-c2k5v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.389169 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/592ba145-17cf-4929-ad52-a324bd87fe0a-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "592ba145-17cf-4929-ad52-a324bd87fe0a" (UID: "592ba145-17cf-4929-ad52-a324bd87fe0a"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.418025 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/592ba145-17cf-4929-ad52-a324bd87fe0a-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "592ba145-17cf-4929-ad52-a324bd87fe0a" (UID: "592ba145-17cf-4929-ad52-a324bd87fe0a"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.451407 4792 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/592ba145-17cf-4929-ad52-a324bd87fe0a-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.451449 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c2k5v\" (UniqueName: \"kubernetes.io/projected/592ba145-17cf-4929-ad52-a324bd87fe0a-kube-api-access-c2k5v\") on node \"crc\" DevicePath \"\"" Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.451465 4792 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/592ba145-17cf-4929-ad52-a324bd87fe0a-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.451478 4792 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/592ba145-17cf-4929-ad52-a324bd87fe0a-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.451489 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/592ba145-17cf-4929-ad52-a324bd87fe0a-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.467697 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/592ba145-17cf-4929-ad52-a324bd87fe0a-config-data" (OuterVolumeSpecName: "config-data") pod "592ba145-17cf-4929-ad52-a324bd87fe0a" (UID: "592ba145-17cf-4929-ad52-a324bd87fe0a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.500568 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/592ba145-17cf-4929-ad52-a324bd87fe0a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "592ba145-17cf-4929-ad52-a324bd87fe0a" (UID: "592ba145-17cf-4929-ad52-a324bd87fe0a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.556512 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/592ba145-17cf-4929-ad52-a324bd87fe0a-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.556553 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/592ba145-17cf-4929-ad52-a324bd87fe0a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.901745 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"592ba145-17cf-4929-ad52-a324bd87fe0a","Type":"ContainerDied","Data":"7f7e427e7aa3a867188bda3f1c806cfad164da1a3a6197d04a138f089a30db4f"} Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.901989 4792 scope.go:117] "RemoveContainer" containerID="afdb78c76a85146d971da39dd995b42e691b0b459700c2087581bd1b0dd91538" Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.902114 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.926636 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.934169 4792 scope.go:117] "RemoveContainer" containerID="22d608f98046c7321e11f65e21e6a3cd1b95dbcab69bf214abafd6aacf9991a0" Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.941983 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.951128 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 02 19:01:57 crc kubenswrapper[4792]: E1202 19:01:57.951534 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="592ba145-17cf-4929-ad52-a324bd87fe0a" containerName="ceilometer-notification-agent" Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.951553 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="592ba145-17cf-4929-ad52-a324bd87fe0a" containerName="ceilometer-notification-agent" Dec 02 19:01:57 crc kubenswrapper[4792]: E1202 19:01:57.951571 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="592ba145-17cf-4929-ad52-a324bd87fe0a" containerName="ceilometer-central-agent" Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.951579 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="592ba145-17cf-4929-ad52-a324bd87fe0a" containerName="ceilometer-central-agent" Dec 02 19:01:57 crc kubenswrapper[4792]: E1202 19:01:57.951600 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="592ba145-17cf-4929-ad52-a324bd87fe0a" containerName="proxy-httpd" Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.951605 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="592ba145-17cf-4929-ad52-a324bd87fe0a" containerName="proxy-httpd" Dec 02 19:01:57 crc kubenswrapper[4792]: E1202 19:01:57.951628 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="592ba145-17cf-4929-ad52-a324bd87fe0a" containerName="sg-core" Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.951634 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="592ba145-17cf-4929-ad52-a324bd87fe0a" containerName="sg-core" Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.951809 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="592ba145-17cf-4929-ad52-a324bd87fe0a" containerName="ceilometer-notification-agent" Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.951826 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="592ba145-17cf-4929-ad52-a324bd87fe0a" containerName="ceilometer-central-agent" Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.951845 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="592ba145-17cf-4929-ad52-a324bd87fe0a" containerName="sg-core" Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.951860 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="592ba145-17cf-4929-ad52-a324bd87fe0a" containerName="proxy-httpd" Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.953580 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.965041 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.988201 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.988278 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.988566 4792 scope.go:117] "RemoveContainer" containerID="903682506d1219b834fcdcbfc534d747e1fb47d76b7d588807598497b8dde7bb" Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.989230 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.991075 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2\") " pod="openstack/ceilometer-0" Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.991151 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2-scripts\") pod \"ceilometer-0\" (UID: \"45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2\") " pod="openstack/ceilometer-0" Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.991173 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2-config-data\") pod \"ceilometer-0\" (UID: \"45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2\") " pod="openstack/ceilometer-0" Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.991219 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wcq45\" (UniqueName: \"kubernetes.io/projected/45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2-kube-api-access-wcq45\") pod \"ceilometer-0\" (UID: \"45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2\") " pod="openstack/ceilometer-0" Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.991279 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2\") " pod="openstack/ceilometer-0" Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.991312 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2\") " pod="openstack/ceilometer-0" Dec 02 19:01:57 crc kubenswrapper[4792]: I1202 19:01:57.991349 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2-log-httpd\") pod \"ceilometer-0\" (UID: \"45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2\") " pod="openstack/ceilometer-0" Dec 02 19:01:57 crc kubenswrapper[4792]: 
I1202 19:01:57.991733 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2-run-httpd\") pod \"ceilometer-0\" (UID: \"45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2\") " pod="openstack/ceilometer-0" Dec 02 19:01:58 crc kubenswrapper[4792]: I1202 19:01:58.049871 4792 scope.go:117] "RemoveContainer" containerID="70363146d7872bb5e3f6ff7639671d03e363a4f84a59b58b8b0e742324e40560" Dec 02 19:01:58 crc kubenswrapper[4792]: I1202 19:01:58.092824 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2-log-httpd\") pod \"ceilometer-0\" (UID: \"45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2\") " pod="openstack/ceilometer-0" Dec 02 19:01:58 crc kubenswrapper[4792]: I1202 19:01:58.092912 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2-run-httpd\") pod \"ceilometer-0\" (UID: \"45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2\") " pod="openstack/ceilometer-0" Dec 02 19:01:58 crc kubenswrapper[4792]: I1202 19:01:58.092930 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2\") " pod="openstack/ceilometer-0" Dec 02 19:01:58 crc kubenswrapper[4792]: I1202 19:01:58.092981 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2-scripts\") pod \"ceilometer-0\" (UID: \"45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2\") " pod="openstack/ceilometer-0" Dec 02 19:01:58 crc kubenswrapper[4792]: I1202 19:01:58.093002 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2-config-data\") pod \"ceilometer-0\" (UID: \"45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2\") " pod="openstack/ceilometer-0" Dec 02 19:01:58 crc kubenswrapper[4792]: I1202 19:01:58.093039 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wcq45\" (UniqueName: \"kubernetes.io/projected/45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2-kube-api-access-wcq45\") pod \"ceilometer-0\" (UID: \"45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2\") " pod="openstack/ceilometer-0" Dec 02 19:01:58 crc kubenswrapper[4792]: I1202 19:01:58.093075 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2\") " pod="openstack/ceilometer-0" Dec 02 19:01:58 crc kubenswrapper[4792]: I1202 19:01:58.093102 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2\") " pod="openstack/ceilometer-0" Dec 02 19:01:58 crc kubenswrapper[4792]: I1202 19:01:58.093310 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2-log-httpd\") pod \"ceilometer-0\" (UID: \"45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2\") " pod="openstack/ceilometer-0" Dec 02 19:01:58 crc kubenswrapper[4792]: I1202 19:01:58.093356 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2-run-httpd\") pod \"ceilometer-0\" (UID: \"45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2\") " pod="openstack/ceilometer-0" Dec 02 19:01:58 crc kubenswrapper[4792]: I1202 19:01:58.098183 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2\") " pod="openstack/ceilometer-0" Dec 02 19:01:58 crc kubenswrapper[4792]: I1202 19:01:58.099634 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2-config-data\") pod \"ceilometer-0\" (UID: \"45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2\") " pod="openstack/ceilometer-0" Dec 02 19:01:58 crc kubenswrapper[4792]: I1202 19:01:58.100417 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2\") " pod="openstack/ceilometer-0" Dec 02 19:01:58 crc kubenswrapper[4792]: I1202 19:01:58.101352 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2-scripts\") pod \"ceilometer-0\" (UID: \"45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2\") " pod="openstack/ceilometer-0" Dec 02 19:01:58 crc kubenswrapper[4792]: I1202 19:01:58.109159 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2\") " pod="openstack/ceilometer-0" Dec 02 19:01:58 crc kubenswrapper[4792]: I1202 19:01:58.137157 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wcq45\" (UniqueName: \"kubernetes.io/projected/45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2-kube-api-access-wcq45\") pod \"ceilometer-0\" (UID: \"45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2\") " pod="openstack/ceilometer-0" Dec 02 19:01:58 crc kubenswrapper[4792]: I1202 19:01:58.323223 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 19:01:58 crc kubenswrapper[4792]: I1202 19:01:58.755729 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba" containerName="rabbitmq" containerID="cri-o://2b24c929b322faa198f40aeb0652d002a7c1fa7fe0d8ade0379bccd9e518bbcb" gracePeriod=604796 Dec 02 19:01:58 crc kubenswrapper[4792]: I1202 19:01:58.838635 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 19:01:58 crc kubenswrapper[4792]: I1202 19:01:58.915823 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2","Type":"ContainerStarted","Data":"15847471e9bc87fb2832211b88ae456e39750b01d53a6cb2ee4542d662d201a0"} Dec 02 19:01:58 crc kubenswrapper[4792]: I1202 19:01:58.968717 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="3d0661cf-534a-4951-9e56-7db65fdfd242" containerName="rabbitmq" containerID="cri-o://4ffe4ac1c8aaa9d73926bbef55548588a25905b08bce9cdf7a9fec6975c74a62" gracePeriod=604796 Dec 02 19:01:59 crc kubenswrapper[4792]: I1202 19:01:59.318422 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="3d0661cf-534a-4951-9e56-7db65fdfd242" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.108:5671: connect: connection refused" Dec 02 19:01:59 crc kubenswrapper[4792]: I1202 19:01:59.354508 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.107:5671: connect: connection refused" Dec 02 19:01:59 crc kubenswrapper[4792]: I1202 19:01:59.553891 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="592ba145-17cf-4929-ad52-a324bd87fe0a" path="/var/lib/kubelet/pods/592ba145-17cf-4929-ad52-a324bd87fe0a/volumes" Dec 02 19:02:06 crc kubenswrapper[4792]: I1202 19:02:06.850550 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-dbb88bf8c-qknx2"] Dec 02 19:02:06 crc kubenswrapper[4792]: I1202 19:02:06.853156 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-dbb88bf8c-qknx2" Dec 02 19:02:06 crc kubenswrapper[4792]: I1202 19:02:06.855001 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Dec 02 19:02:06 crc kubenswrapper[4792]: I1202 19:02:06.858797 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-dbb88bf8c-qknx2"] Dec 02 19:02:06 crc kubenswrapper[4792]: I1202 19:02:06.933647 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/a3cc6d17-81b1-431b-8af5-3fc48a65dc37-openstack-edpm-ipam\") pod \"dnsmasq-dns-dbb88bf8c-qknx2\" (UID: \"a3cc6d17-81b1-431b-8af5-3fc48a65dc37\") " pod="openstack/dnsmasq-dns-dbb88bf8c-qknx2" Dec 02 19:02:06 crc kubenswrapper[4792]: I1202 19:02:06.933692 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a3cc6d17-81b1-431b-8af5-3fc48a65dc37-dns-swift-storage-0\") pod \"dnsmasq-dns-dbb88bf8c-qknx2\" (UID: \"a3cc6d17-81b1-431b-8af5-3fc48a65dc37\") " pod="openstack/dnsmasq-dns-dbb88bf8c-qknx2" Dec 02 19:02:06 crc kubenswrapper[4792]: I1202 19:02:06.933725 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3cc6d17-81b1-431b-8af5-3fc48a65dc37-config\") pod \"dnsmasq-dns-dbb88bf8c-qknx2\" (UID: \"a3cc6d17-81b1-431b-8af5-3fc48a65dc37\") " pod="openstack/dnsmasq-dns-dbb88bf8c-qknx2" Dec 02 19:02:06 crc kubenswrapper[4792]: I1202 19:02:06.933746 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a3cc6d17-81b1-431b-8af5-3fc48a65dc37-ovsdbserver-sb\") pod \"dnsmasq-dns-dbb88bf8c-qknx2\" (UID: \"a3cc6d17-81b1-431b-8af5-3fc48a65dc37\") " pod="openstack/dnsmasq-dns-dbb88bf8c-qknx2" Dec 02 19:02:06 crc kubenswrapper[4792]: I1202 19:02:06.933773 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a3cc6d17-81b1-431b-8af5-3fc48a65dc37-dns-svc\") pod \"dnsmasq-dns-dbb88bf8c-qknx2\" (UID: \"a3cc6d17-81b1-431b-8af5-3fc48a65dc37\") " pod="openstack/dnsmasq-dns-dbb88bf8c-qknx2" Dec 02 19:02:06 crc kubenswrapper[4792]: I1202 19:02:06.933823 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xbw87\" (UniqueName: \"kubernetes.io/projected/a3cc6d17-81b1-431b-8af5-3fc48a65dc37-kube-api-access-xbw87\") pod \"dnsmasq-dns-dbb88bf8c-qknx2\" (UID: \"a3cc6d17-81b1-431b-8af5-3fc48a65dc37\") " pod="openstack/dnsmasq-dns-dbb88bf8c-qknx2" Dec 02 19:02:06 crc kubenswrapper[4792]: I1202 19:02:06.933844 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a3cc6d17-81b1-431b-8af5-3fc48a65dc37-ovsdbserver-nb\") pod \"dnsmasq-dns-dbb88bf8c-qknx2\" (UID: \"a3cc6d17-81b1-431b-8af5-3fc48a65dc37\") " pod="openstack/dnsmasq-dns-dbb88bf8c-qknx2" Dec 02 19:02:07 crc kubenswrapper[4792]: I1202 19:02:07.028755 4792 generic.go:334] "Generic (PLEG): container finished" podID="3d0661cf-534a-4951-9e56-7db65fdfd242" containerID="4ffe4ac1c8aaa9d73926bbef55548588a25905b08bce9cdf7a9fec6975c74a62" exitCode=0 Dec 02 19:02:07 crc kubenswrapper[4792]: I1202 
19:02:07.028845 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"3d0661cf-534a-4951-9e56-7db65fdfd242","Type":"ContainerDied","Data":"4ffe4ac1c8aaa9d73926bbef55548588a25905b08bce9cdf7a9fec6975c74a62"} Dec 02 19:02:07 crc kubenswrapper[4792]: I1202 19:02:07.031228 4792 generic.go:334] "Generic (PLEG): container finished" podID="c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba" containerID="2b24c929b322faa198f40aeb0652d002a7c1fa7fe0d8ade0379bccd9e518bbcb" exitCode=0 Dec 02 19:02:07 crc kubenswrapper[4792]: I1202 19:02:07.031260 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba","Type":"ContainerDied","Data":"2b24c929b322faa198f40aeb0652d002a7c1fa7fe0d8ade0379bccd9e518bbcb"} Dec 02 19:02:07 crc kubenswrapper[4792]: I1202 19:02:07.035820 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xbw87\" (UniqueName: \"kubernetes.io/projected/a3cc6d17-81b1-431b-8af5-3fc48a65dc37-kube-api-access-xbw87\") pod \"dnsmasq-dns-dbb88bf8c-qknx2\" (UID: \"a3cc6d17-81b1-431b-8af5-3fc48a65dc37\") " pod="openstack/dnsmasq-dns-dbb88bf8c-qknx2" Dec 02 19:02:07 crc kubenswrapper[4792]: I1202 19:02:07.035882 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a3cc6d17-81b1-431b-8af5-3fc48a65dc37-ovsdbserver-nb\") pod \"dnsmasq-dns-dbb88bf8c-qknx2\" (UID: \"a3cc6d17-81b1-431b-8af5-3fc48a65dc37\") " pod="openstack/dnsmasq-dns-dbb88bf8c-qknx2" Dec 02 19:02:07 crc kubenswrapper[4792]: I1202 19:02:07.036031 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/a3cc6d17-81b1-431b-8af5-3fc48a65dc37-openstack-edpm-ipam\") pod \"dnsmasq-dns-dbb88bf8c-qknx2\" (UID: \"a3cc6d17-81b1-431b-8af5-3fc48a65dc37\") " pod="openstack/dnsmasq-dns-dbb88bf8c-qknx2" Dec 02 19:02:07 crc kubenswrapper[4792]: I1202 19:02:07.036061 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a3cc6d17-81b1-431b-8af5-3fc48a65dc37-dns-swift-storage-0\") pod \"dnsmasq-dns-dbb88bf8c-qknx2\" (UID: \"a3cc6d17-81b1-431b-8af5-3fc48a65dc37\") " pod="openstack/dnsmasq-dns-dbb88bf8c-qknx2" Dec 02 19:02:07 crc kubenswrapper[4792]: I1202 19:02:07.036113 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3cc6d17-81b1-431b-8af5-3fc48a65dc37-config\") pod \"dnsmasq-dns-dbb88bf8c-qknx2\" (UID: \"a3cc6d17-81b1-431b-8af5-3fc48a65dc37\") " pod="openstack/dnsmasq-dns-dbb88bf8c-qknx2" Dec 02 19:02:07 crc kubenswrapper[4792]: I1202 19:02:07.036138 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a3cc6d17-81b1-431b-8af5-3fc48a65dc37-ovsdbserver-sb\") pod \"dnsmasq-dns-dbb88bf8c-qknx2\" (UID: \"a3cc6d17-81b1-431b-8af5-3fc48a65dc37\") " pod="openstack/dnsmasq-dns-dbb88bf8c-qknx2" Dec 02 19:02:07 crc kubenswrapper[4792]: I1202 19:02:07.036186 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a3cc6d17-81b1-431b-8af5-3fc48a65dc37-dns-svc\") pod \"dnsmasq-dns-dbb88bf8c-qknx2\" (UID: \"a3cc6d17-81b1-431b-8af5-3fc48a65dc37\") " pod="openstack/dnsmasq-dns-dbb88bf8c-qknx2" Dec 02 19:02:07 crc 
kubenswrapper[4792]: I1202 19:02:07.037629 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a3cc6d17-81b1-431b-8af5-3fc48a65dc37-ovsdbserver-sb\") pod \"dnsmasq-dns-dbb88bf8c-qknx2\" (UID: \"a3cc6d17-81b1-431b-8af5-3fc48a65dc37\") " pod="openstack/dnsmasq-dns-dbb88bf8c-qknx2" Dec 02 19:02:07 crc kubenswrapper[4792]: I1202 19:02:07.037908 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3cc6d17-81b1-431b-8af5-3fc48a65dc37-config\") pod \"dnsmasq-dns-dbb88bf8c-qknx2\" (UID: \"a3cc6d17-81b1-431b-8af5-3fc48a65dc37\") " pod="openstack/dnsmasq-dns-dbb88bf8c-qknx2" Dec 02 19:02:07 crc kubenswrapper[4792]: I1202 19:02:07.037913 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a3cc6d17-81b1-431b-8af5-3fc48a65dc37-dns-svc\") pod \"dnsmasq-dns-dbb88bf8c-qknx2\" (UID: \"a3cc6d17-81b1-431b-8af5-3fc48a65dc37\") " pod="openstack/dnsmasq-dns-dbb88bf8c-qknx2" Dec 02 19:02:07 crc kubenswrapper[4792]: I1202 19:02:07.038239 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a3cc6d17-81b1-431b-8af5-3fc48a65dc37-dns-swift-storage-0\") pod \"dnsmasq-dns-dbb88bf8c-qknx2\" (UID: \"a3cc6d17-81b1-431b-8af5-3fc48a65dc37\") " pod="openstack/dnsmasq-dns-dbb88bf8c-qknx2" Dec 02 19:02:07 crc kubenswrapper[4792]: I1202 19:02:07.038401 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/a3cc6d17-81b1-431b-8af5-3fc48a65dc37-openstack-edpm-ipam\") pod \"dnsmasq-dns-dbb88bf8c-qknx2\" (UID: \"a3cc6d17-81b1-431b-8af5-3fc48a65dc37\") " pod="openstack/dnsmasq-dns-dbb88bf8c-qknx2" Dec 02 19:02:07 crc kubenswrapper[4792]: I1202 19:02:07.043011 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a3cc6d17-81b1-431b-8af5-3fc48a65dc37-ovsdbserver-nb\") pod \"dnsmasq-dns-dbb88bf8c-qknx2\" (UID: \"a3cc6d17-81b1-431b-8af5-3fc48a65dc37\") " pod="openstack/dnsmasq-dns-dbb88bf8c-qknx2" Dec 02 19:02:07 crc kubenswrapper[4792]: I1202 19:02:07.054788 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xbw87\" (UniqueName: \"kubernetes.io/projected/a3cc6d17-81b1-431b-8af5-3fc48a65dc37-kube-api-access-xbw87\") pod \"dnsmasq-dns-dbb88bf8c-qknx2\" (UID: \"a3cc6d17-81b1-431b-8af5-3fc48a65dc37\") " pod="openstack/dnsmasq-dns-dbb88bf8c-qknx2" Dec 02 19:02:07 crc kubenswrapper[4792]: I1202 19:02:07.190959 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-dbb88bf8c-qknx2" Dec 02 19:02:08 crc kubenswrapper[4792]: I1202 19:02:08.081316 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 19:02:08 crc kubenswrapper[4792]: I1202 19:02:08.081392 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 19:02:08 crc kubenswrapper[4792]: I1202 19:02:08.081451 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" Dec 02 19:02:08 crc kubenswrapper[4792]: I1202 19:02:08.082593 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1bd3680d8cabbe09313bc8e9bfcfc7ae4c8aef120bcf8851a5536ed5b3144523"} pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 19:02:08 crc kubenswrapper[4792]: I1202 19:02:08.083103 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" containerID="cri-o://1bd3680d8cabbe09313bc8e9bfcfc7ae4c8aef120bcf8851a5536ed5b3144523" gracePeriod=600 Dec 02 19:02:09 crc kubenswrapper[4792]: I1202 19:02:09.053231 4792 generic.go:334] "Generic (PLEG): container finished" podID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerID="1bd3680d8cabbe09313bc8e9bfcfc7ae4c8aef120bcf8851a5536ed5b3144523" exitCode=0 Dec 02 19:02:09 crc kubenswrapper[4792]: I1202 19:02:09.053253 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" event={"ID":"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f","Type":"ContainerDied","Data":"1bd3680d8cabbe09313bc8e9bfcfc7ae4c8aef120bcf8851a5536ed5b3144523"} Dec 02 19:02:09 crc kubenswrapper[4792]: I1202 19:02:09.053548 4792 scope.go:117] "RemoveContainer" containerID="dc04fc6a49e6cf5090b01aa7a72bef9189df42704e3c79ebf0699d5a961190bd" Dec 02 19:02:09 crc kubenswrapper[4792]: I1202 19:02:09.318930 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="3d0661cf-534a-4951-9e56-7db65fdfd242" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.108:5671: connect: connection refused" Dec 02 19:02:09 crc kubenswrapper[4792]: I1202 19:02:09.354399 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.107:5671: connect: connection refused" Dec 02 19:02:11 crc kubenswrapper[4792]: E1202 19:02:11.957574 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:02:12 crc kubenswrapper[4792]: I1202 19:02:12.085571 4792 scope.go:117] "RemoveContainer" containerID="1bd3680d8cabbe09313bc8e9bfcfc7ae4c8aef120bcf8851a5536ed5b3144523" Dec 02 19:02:12 crc kubenswrapper[4792]: E1202 19:02:12.086153 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:02:12 crc kubenswrapper[4792]: E1202 19:02:12.565413 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current-tested" Dec 02 19:02:12 crc kubenswrapper[4792]: E1202 19:02:12.565470 4792 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current-tested" Dec 02 19:02:12 crc kubenswrapper[4792]: E1202 19:02:12.565613 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cloudkitty-db-sync,Image:quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current-tested,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CloudKittyPassword,Value:,ValueFrom:&EnvVarSource{FieldRef:nil,ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:&SecretKeySelector{LocalObjectReference:LocalObjectReference{Name:osp-secret,},Key:CloudKittyPassword,Optional:nil,},},},EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:cloudkitty-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:certs,ReadOnly:true,MountPath:/var/lib/openstack/loki-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2v9kw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil
,RunAsUser:*42406,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cloudkitty-db-sync-vj9gg_openstack(809cb60d-1a06-4216-8f74-882aa0d7470e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 19:02:12 crc kubenswrapper[4792]: E1202 19:02:12.566804 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cloudkitty-db-sync-vj9gg" podUID="809cb60d-1a06-4216-8f74-882aa0d7470e" Dec 02 19:02:12 crc kubenswrapper[4792]: I1202 19:02:12.616784 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 02 19:02:12 crc kubenswrapper[4792]: I1202 19:02:12.800992 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/3d0661cf-534a-4951-9e56-7db65fdfd242-rabbitmq-erlang-cookie\") pod \"3d0661cf-534a-4951-9e56-7db65fdfd242\" (UID: \"3d0661cf-534a-4951-9e56-7db65fdfd242\") " Dec 02 19:02:12 crc kubenswrapper[4792]: I1202 19:02:12.801131 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/3d0661cf-534a-4951-9e56-7db65fdfd242-rabbitmq-tls\") pod \"3d0661cf-534a-4951-9e56-7db65fdfd242\" (UID: \"3d0661cf-534a-4951-9e56-7db65fdfd242\") " Dec 02 19:02:12 crc kubenswrapper[4792]: I1202 19:02:12.801174 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/3d0661cf-534a-4951-9e56-7db65fdfd242-rabbitmq-plugins\") pod \"3d0661cf-534a-4951-9e56-7db65fdfd242\" (UID: \"3d0661cf-534a-4951-9e56-7db65fdfd242\") " Dec 02 19:02:12 crc kubenswrapper[4792]: I1202 19:02:12.801227 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-756l7\" (UniqueName: \"kubernetes.io/projected/3d0661cf-534a-4951-9e56-7db65fdfd242-kube-api-access-756l7\") pod \"3d0661cf-534a-4951-9e56-7db65fdfd242\" (UID: \"3d0661cf-534a-4951-9e56-7db65fdfd242\") " Dec 02 19:02:12 crc kubenswrapper[4792]: I1202 19:02:12.801249 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/3d0661cf-534a-4951-9e56-7db65fdfd242-plugins-conf\") pod \"3d0661cf-534a-4951-9e56-7db65fdfd242\" (UID: \"3d0661cf-534a-4951-9e56-7db65fdfd242\") " Dec 02 19:02:12 crc kubenswrapper[4792]: I1202 19:02:12.801269 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3d0661cf-534a-4951-9e56-7db65fdfd242-config-data\") pod \"3d0661cf-534a-4951-9e56-7db65fdfd242\" (UID: \"3d0661cf-534a-4951-9e56-7db65fdfd242\") " Dec 02 19:02:12 crc kubenswrapper[4792]: I1202 19:02:12.801334 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/3d0661cf-534a-4951-9e56-7db65fdfd242-pod-info\") pod 
\"3d0661cf-534a-4951-9e56-7db65fdfd242\" (UID: \"3d0661cf-534a-4951-9e56-7db65fdfd242\") " Dec 02 19:02:12 crc kubenswrapper[4792]: I1202 19:02:12.801359 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/3d0661cf-534a-4951-9e56-7db65fdfd242-rabbitmq-confd\") pod \"3d0661cf-534a-4951-9e56-7db65fdfd242\" (UID: \"3d0661cf-534a-4951-9e56-7db65fdfd242\") " Dec 02 19:02:12 crc kubenswrapper[4792]: I1202 19:02:12.801377 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/3d0661cf-534a-4951-9e56-7db65fdfd242-server-conf\") pod \"3d0661cf-534a-4951-9e56-7db65fdfd242\" (UID: \"3d0661cf-534a-4951-9e56-7db65fdfd242\") " Dec 02 19:02:12 crc kubenswrapper[4792]: I1202 19:02:12.801401 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/3d0661cf-534a-4951-9e56-7db65fdfd242-erlang-cookie-secret\") pod \"3d0661cf-534a-4951-9e56-7db65fdfd242\" (UID: \"3d0661cf-534a-4951-9e56-7db65fdfd242\") " Dec 02 19:02:12 crc kubenswrapper[4792]: I1202 19:02:12.802787 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0fb911ca-2299-49b2-93df-b2fa32ed69d7\") pod \"3d0661cf-534a-4951-9e56-7db65fdfd242\" (UID: \"3d0661cf-534a-4951-9e56-7db65fdfd242\") " Dec 02 19:02:12 crc kubenswrapper[4792]: I1202 19:02:12.803146 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d0661cf-534a-4951-9e56-7db65fdfd242-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "3d0661cf-534a-4951-9e56-7db65fdfd242" (UID: "3d0661cf-534a-4951-9e56-7db65fdfd242"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:02:12 crc kubenswrapper[4792]: I1202 19:02:12.804958 4792 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/3d0661cf-534a-4951-9e56-7db65fdfd242-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:12 crc kubenswrapper[4792]: I1202 19:02:12.805665 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3d0661cf-534a-4951-9e56-7db65fdfd242-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "3d0661cf-534a-4951-9e56-7db65fdfd242" (UID: "3d0661cf-534a-4951-9e56-7db65fdfd242"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 19:02:12 crc kubenswrapper[4792]: I1202 19:02:12.805885 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d0661cf-534a-4951-9e56-7db65fdfd242-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "3d0661cf-534a-4951-9e56-7db65fdfd242" (UID: "3d0661cf-534a-4951-9e56-7db65fdfd242"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:02:12 crc kubenswrapper[4792]: I1202 19:02:12.811342 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d0661cf-534a-4951-9e56-7db65fdfd242-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "3d0661cf-534a-4951-9e56-7db65fdfd242" (UID: "3d0661cf-534a-4951-9e56-7db65fdfd242"). 
InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:02:12 crc kubenswrapper[4792]: I1202 19:02:12.812475 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d0661cf-534a-4951-9e56-7db65fdfd242-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "3d0661cf-534a-4951-9e56-7db65fdfd242" (UID: "3d0661cf-534a-4951-9e56-7db65fdfd242"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:02:12 crc kubenswrapper[4792]: I1202 19:02:12.813641 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/3d0661cf-534a-4951-9e56-7db65fdfd242-pod-info" (OuterVolumeSpecName: "pod-info") pod "3d0661cf-534a-4951-9e56-7db65fdfd242" (UID: "3d0661cf-534a-4951-9e56-7db65fdfd242"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 02 19:02:12 crc kubenswrapper[4792]: I1202 19:02:12.833543 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d0661cf-534a-4951-9e56-7db65fdfd242-kube-api-access-756l7" (OuterVolumeSpecName: "kube-api-access-756l7") pod "3d0661cf-534a-4951-9e56-7db65fdfd242" (UID: "3d0661cf-534a-4951-9e56-7db65fdfd242"). InnerVolumeSpecName "kube-api-access-756l7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:02:12 crc kubenswrapper[4792]: I1202 19:02:12.854019 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0fb911ca-2299-49b2-93df-b2fa32ed69d7" (OuterVolumeSpecName: "persistence") pod "3d0661cf-534a-4951-9e56-7db65fdfd242" (UID: "3d0661cf-534a-4951-9e56-7db65fdfd242"). InnerVolumeSpecName "pvc-0fb911ca-2299-49b2-93df-b2fa32ed69d7". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 02 19:02:12 crc kubenswrapper[4792]: I1202 19:02:12.889971 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3d0661cf-534a-4951-9e56-7db65fdfd242-config-data" (OuterVolumeSpecName: "config-data") pod "3d0661cf-534a-4951-9e56-7db65fdfd242" (UID: "3d0661cf-534a-4951-9e56-7db65fdfd242"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 19:02:12 crc kubenswrapper[4792]: I1202 19:02:12.901358 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3d0661cf-534a-4951-9e56-7db65fdfd242-server-conf" (OuterVolumeSpecName: "server-conf") pod "3d0661cf-534a-4951-9e56-7db65fdfd242" (UID: "3d0661cf-534a-4951-9e56-7db65fdfd242"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 19:02:12 crc kubenswrapper[4792]: I1202 19:02:12.906939 4792 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/3d0661cf-534a-4951-9e56-7db65fdfd242-pod-info\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:12 crc kubenswrapper[4792]: I1202 19:02:12.906960 4792 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/3d0661cf-534a-4951-9e56-7db65fdfd242-server-conf\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:12 crc kubenswrapper[4792]: I1202 19:02:12.906971 4792 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/3d0661cf-534a-4951-9e56-7db65fdfd242-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:12 crc kubenswrapper[4792]: I1202 19:02:12.907004 4792 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-0fb911ca-2299-49b2-93df-b2fa32ed69d7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0fb911ca-2299-49b2-93df-b2fa32ed69d7\") on node \"crc\" " Dec 02 19:02:12 crc kubenswrapper[4792]: I1202 19:02:12.907021 4792 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/3d0661cf-534a-4951-9e56-7db65fdfd242-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:12 crc kubenswrapper[4792]: I1202 19:02:12.907031 4792 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/3d0661cf-534a-4951-9e56-7db65fdfd242-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:12 crc kubenswrapper[4792]: I1202 19:02:12.907042 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-756l7\" (UniqueName: \"kubernetes.io/projected/3d0661cf-534a-4951-9e56-7db65fdfd242-kube-api-access-756l7\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:12 crc kubenswrapper[4792]: I1202 19:02:12.907051 4792 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/3d0661cf-534a-4951-9e56-7db65fdfd242-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:12 crc kubenswrapper[4792]: I1202 19:02:12.907060 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3d0661cf-534a-4951-9e56-7db65fdfd242-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:12 crc kubenswrapper[4792]: I1202 19:02:12.908932 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d0661cf-534a-4951-9e56-7db65fdfd242-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "3d0661cf-534a-4951-9e56-7db65fdfd242" (UID: "3d0661cf-534a-4951-9e56-7db65fdfd242"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:02:12 crc kubenswrapper[4792]: I1202 19:02:12.934564 4792 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Dec 02 19:02:12 crc kubenswrapper[4792]: I1202 19:02:12.935768 4792 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-0fb911ca-2299-49b2-93df-b2fa32ed69d7" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0fb911ca-2299-49b2-93df-b2fa32ed69d7") on node "crc" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.017414 4792 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/3d0661cf-534a-4951-9e56-7db65fdfd242-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.017463 4792 reconciler_common.go:293] "Volume detached for volume \"pvc-0fb911ca-2299-49b2-93df-b2fa32ed69d7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0fb911ca-2299-49b2-93df-b2fa32ed69d7\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.101545 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.101714 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"3d0661cf-534a-4951-9e56-7db65fdfd242","Type":"ContainerDied","Data":"c24096e3121c825293ace7f19aa00c6dc24a745a43a9ddaa9cd40ee4ae29594d"} Dec 02 19:02:13 crc kubenswrapper[4792]: E1202 19:02:13.103065 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current-tested\\\"\"" pod="openstack/cloudkitty-db-sync-vj9gg" podUID="809cb60d-1a06-4216-8f74-882aa0d7470e" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.151000 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.160870 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.176828 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 02 19:02:13 crc kubenswrapper[4792]: E1202 19:02:13.177305 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d0661cf-534a-4951-9e56-7db65fdfd242" containerName="setup-container" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.177323 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d0661cf-534a-4951-9e56-7db65fdfd242" containerName="setup-container" Dec 02 19:02:13 crc kubenswrapper[4792]: E1202 19:02:13.177342 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d0661cf-534a-4951-9e56-7db65fdfd242" containerName="rabbitmq" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.177348 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d0661cf-534a-4951-9e56-7db65fdfd242" containerName="rabbitmq" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.177572 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d0661cf-534a-4951-9e56-7db65fdfd242" containerName="rabbitmq" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.178845 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.183998 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.184118 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.184228 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.184269 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-zsftm" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.184319 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.184491 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.185765 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.186431 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.221327 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c43c55d9-74e9-4158-a193-ee8ead807ad7-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"c43c55d9-74e9-4158-a193-ee8ead807ad7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.221370 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c43c55d9-74e9-4158-a193-ee8ead807ad7-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"c43c55d9-74e9-4158-a193-ee8ead807ad7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.221399 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c43c55d9-74e9-4158-a193-ee8ead807ad7-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"c43c55d9-74e9-4158-a193-ee8ead807ad7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.221426 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c43c55d9-74e9-4158-a193-ee8ead807ad7-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c43c55d9-74e9-4158-a193-ee8ead807ad7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.221457 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mj7rm\" (UniqueName: \"kubernetes.io/projected/c43c55d9-74e9-4158-a193-ee8ead807ad7-kube-api-access-mj7rm\") pod \"rabbitmq-cell1-server-0\" (UID: \"c43c55d9-74e9-4158-a193-ee8ead807ad7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.221479 4792 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c43c55d9-74e9-4158-a193-ee8ead807ad7-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"c43c55d9-74e9-4158-a193-ee8ead807ad7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.221502 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-0fb911ca-2299-49b2-93df-b2fa32ed69d7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0fb911ca-2299-49b2-93df-b2fa32ed69d7\") pod \"rabbitmq-cell1-server-0\" (UID: \"c43c55d9-74e9-4158-a193-ee8ead807ad7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.221540 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c43c55d9-74e9-4158-a193-ee8ead807ad7-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"c43c55d9-74e9-4158-a193-ee8ead807ad7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.221577 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c43c55d9-74e9-4158-a193-ee8ead807ad7-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"c43c55d9-74e9-4158-a193-ee8ead807ad7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.221797 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c43c55d9-74e9-4158-a193-ee8ead807ad7-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"c43c55d9-74e9-4158-a193-ee8ead807ad7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.221912 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c43c55d9-74e9-4158-a193-ee8ead807ad7-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c43c55d9-74e9-4158-a193-ee8ead807ad7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.258758 4792 scope.go:117] "RemoveContainer" containerID="4ffe4ac1c8aaa9d73926bbef55548588a25905b08bce9cdf7a9fec6975c74a62" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.323746 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c43c55d9-74e9-4158-a193-ee8ead807ad7-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"c43c55d9-74e9-4158-a193-ee8ead807ad7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.323944 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c43c55d9-74e9-4158-a193-ee8ead807ad7-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"c43c55d9-74e9-4158-a193-ee8ead807ad7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.323979 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c43c55d9-74e9-4158-a193-ee8ead807ad7-plugins-conf\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"c43c55d9-74e9-4158-a193-ee8ead807ad7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.324142 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mj7rm\" (UniqueName: \"kubernetes.io/projected/c43c55d9-74e9-4158-a193-ee8ead807ad7-kube-api-access-mj7rm\") pod \"rabbitmq-cell1-server-0\" (UID: \"c43c55d9-74e9-4158-a193-ee8ead807ad7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.324194 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c43c55d9-74e9-4158-a193-ee8ead807ad7-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"c43c55d9-74e9-4158-a193-ee8ead807ad7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.324220 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-0fb911ca-2299-49b2-93df-b2fa32ed69d7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0fb911ca-2299-49b2-93df-b2fa32ed69d7\") pod \"rabbitmq-cell1-server-0\" (UID: \"c43c55d9-74e9-4158-a193-ee8ead807ad7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.324247 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c43c55d9-74e9-4158-a193-ee8ead807ad7-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"c43c55d9-74e9-4158-a193-ee8ead807ad7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.324290 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c43c55d9-74e9-4158-a193-ee8ead807ad7-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"c43c55d9-74e9-4158-a193-ee8ead807ad7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.324331 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c43c55d9-74e9-4158-a193-ee8ead807ad7-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"c43c55d9-74e9-4158-a193-ee8ead807ad7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.324367 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c43c55d9-74e9-4158-a193-ee8ead807ad7-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c43c55d9-74e9-4158-a193-ee8ead807ad7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.324437 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c43c55d9-74e9-4158-a193-ee8ead807ad7-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"c43c55d9-74e9-4158-a193-ee8ead807ad7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.324961 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c43c55d9-74e9-4158-a193-ee8ead807ad7-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"c43c55d9-74e9-4158-a193-ee8ead807ad7\") " 
pod="openstack/rabbitmq-cell1-server-0" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.325654 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c43c55d9-74e9-4158-a193-ee8ead807ad7-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c43c55d9-74e9-4158-a193-ee8ead807ad7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.326760 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c43c55d9-74e9-4158-a193-ee8ead807ad7-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c43c55d9-74e9-4158-a193-ee8ead807ad7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.326937 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c43c55d9-74e9-4158-a193-ee8ead807ad7-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"c43c55d9-74e9-4158-a193-ee8ead807ad7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.327497 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c43c55d9-74e9-4158-a193-ee8ead807ad7-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"c43c55d9-74e9-4158-a193-ee8ead807ad7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.330749 4792 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.330803 4792 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-0fb911ca-2299-49b2-93df-b2fa32ed69d7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0fb911ca-2299-49b2-93df-b2fa32ed69d7\") pod \"rabbitmq-cell1-server-0\" (UID: \"c43c55d9-74e9-4158-a193-ee8ead807ad7\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/d70c3530c6f343b08b17089f91da552efef4f7706ca172792211e9d032865e36/globalmount\"" pod="openstack/rabbitmq-cell1-server-0" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.333137 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c43c55d9-74e9-4158-a193-ee8ead807ad7-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"c43c55d9-74e9-4158-a193-ee8ead807ad7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.335190 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c43c55d9-74e9-4158-a193-ee8ead807ad7-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"c43c55d9-74e9-4158-a193-ee8ead807ad7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.336636 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c43c55d9-74e9-4158-a193-ee8ead807ad7-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"c43c55d9-74e9-4158-a193-ee8ead807ad7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.344093 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c43c55d9-74e9-4158-a193-ee8ead807ad7-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"c43c55d9-74e9-4158-a193-ee8ead807ad7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.347715 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mj7rm\" (UniqueName: \"kubernetes.io/projected/c43c55d9-74e9-4158-a193-ee8ead807ad7-kube-api-access-mj7rm\") pod \"rabbitmq-cell1-server-0\" (UID: \"c43c55d9-74e9-4158-a193-ee8ead807ad7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.386890 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-0fb911ca-2299-49b2-93df-b2fa32ed69d7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0fb911ca-2299-49b2-93df-b2fa32ed69d7\") pod \"rabbitmq-cell1-server-0\" (UID: \"c43c55d9-74e9-4158-a193-ee8ead807ad7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.406132 4792 scope.go:117] "RemoveContainer" containerID="a62c2df38630c7ebad8e25ccbad956ac0e5658754400e4b860a8eb3f0cf36f97" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.407456 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.527677 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-rabbitmq-plugins\") pod \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\" (UID: \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\") " Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.527970 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-rabbitmq-tls\") pod \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\" (UID: \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\") " Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.528066 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-pod-info\") pod \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\" (UID: \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\") " Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.528139 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-server-conf\") pod \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\" (UID: \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\") " Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.528163 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-config-data\") pod \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\" (UID: \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\") " Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.528228 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-erlang-cookie-secret\") pod \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\" (UID: \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\") " Dec 02 19:02:13 crc kubenswrapper[4792]: 
I1202 19:02:13.528671 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7dd44334-132b-482c-9476-999deb6f1264\") pod \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\" (UID: \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\") " Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.528710 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gdbx6\" (UniqueName: \"kubernetes.io/projected/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-kube-api-access-gdbx6\") pod \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\" (UID: \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\") " Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.528766 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba" (UID: "c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.528774 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-rabbitmq-erlang-cookie\") pod \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\" (UID: \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\") " Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.528915 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-rabbitmq-confd\") pod \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\" (UID: \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\") " Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.528947 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-plugins-conf\") pod \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\" (UID: \"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba\") " Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.529206 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba" (UID: "c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.530040 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba" (UID: "c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba"). InnerVolumeSpecName "plugins-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.530316 4792 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.530328 4792 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.530336 4792 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.532495 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba" (UID: "c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.537614 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-pod-info" (OuterVolumeSpecName: "pod-info") pod "c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba" (UID: "c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.544425 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-kube-api-access-gdbx6" (OuterVolumeSpecName: "kube-api-access-gdbx6") pod "c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba" (UID: "c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba"). InnerVolumeSpecName "kube-api-access-gdbx6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.544810 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba" (UID: "c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.553607 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d0661cf-534a-4951-9e56-7db65fdfd242" path="/var/lib/kubelet/pods/3d0661cf-534a-4951-9e56-7db65fdfd242/volumes" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.564436 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-config-data" (OuterVolumeSpecName: "config-data") pod "c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba" (UID: "c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.577583 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7dd44334-132b-482c-9476-999deb6f1264" (OuterVolumeSpecName: "persistence") pod "c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba" (UID: "c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba"). InnerVolumeSpecName "pvc-7dd44334-132b-482c-9476-999deb6f1264". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.607045 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-server-conf" (OuterVolumeSpecName: "server-conf") pod "c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba" (UID: "c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.632103 4792 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-pod-info\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.632135 4792 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-server-conf\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.632145 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.632154 4792 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.632176 4792 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-7dd44334-132b-482c-9476-999deb6f1264\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7dd44334-132b-482c-9476-999deb6f1264\") on node \"crc\" " Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.632186 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gdbx6\" (UniqueName: \"kubernetes.io/projected/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-kube-api-access-gdbx6\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.632195 4792 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.661667 4792 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.661967 4792 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-7dd44334-132b-482c-9476-999deb6f1264" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7dd44334-132b-482c-9476-999deb6f1264") on node "crc" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.675874 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba" (UID: "c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.684980 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.704974 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-dbb88bf8c-qknx2"] Dec 02 19:02:13 crc kubenswrapper[4792]: W1202 19:02:13.707790 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda3cc6d17_81b1_431b_8af5_3fc48a65dc37.slice/crio-34ef662e319a46a47593d583d660ead413e5476947aa4a2ac6a01963b9232bf5 WatchSource:0}: Error finding container 34ef662e319a46a47593d583d660ead413e5476947aa4a2ac6a01963b9232bf5: Status 404 returned error can't find the container with id 34ef662e319a46a47593d583d660ead413e5476947aa4a2ac6a01963b9232bf5 Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.733633 4792 reconciler_common.go:293] "Volume detached for volume \"pvc-7dd44334-132b-482c-9476-999deb6f1264\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7dd44334-132b-482c-9476-999deb6f1264\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:13 crc kubenswrapper[4792]: I1202 19:02:13.733662 4792 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.156241 4792 generic.go:334] "Generic (PLEG): container finished" podID="a3cc6d17-81b1-431b-8af5-3fc48a65dc37" containerID="179f3fc6055f21eed1f667d3cc74da7dbe9bd304358fcb7f4fb9575906985440" exitCode=0 Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.156714 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-dbb88bf8c-qknx2" event={"ID":"a3cc6d17-81b1-431b-8af5-3fc48a65dc37","Type":"ContainerDied","Data":"179f3fc6055f21eed1f667d3cc74da7dbe9bd304358fcb7f4fb9575906985440"} Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.156739 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-dbb88bf8c-qknx2" event={"ID":"a3cc6d17-81b1-431b-8af5-3fc48a65dc37","Type":"ContainerStarted","Data":"34ef662e319a46a47593d583d660ead413e5476947aa4a2ac6a01963b9232bf5"} Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.163568 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.164372 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba","Type":"ContainerDied","Data":"e29bba0e752559fb8b4e00d5a165c27578bfb7b76f8e49d6e15bc23a5db6b426"} Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.166433 4792 scope.go:117] "RemoveContainer" containerID="2b24c929b322faa198f40aeb0652d002a7c1fa7fe0d8ade0379bccd9e518bbcb" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.167985 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2","Type":"ContainerStarted","Data":"249e08f02cba9718b111150fb905a455824d9b95754ae4f78b2ac657eb00f947"} Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.215618 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.351507 4792 scope.go:117] "RemoveContainer" containerID="970ed1a9e9aefd99aafb9cf861569ab2b8f1f85a7d8d627a4cba42af2fd1adf8" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.387984 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.407583 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.416376 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 02 19:02:14 crc kubenswrapper[4792]: E1202 19:02:14.416804 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba" containerName="rabbitmq" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.416821 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba" containerName="rabbitmq" Dec 02 19:02:14 crc kubenswrapper[4792]: E1202 19:02:14.416855 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba" containerName="setup-container" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.416861 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba" containerName="setup-container" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.417061 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba" containerName="rabbitmq" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.418084 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.420132 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-cnkhh" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.420463 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.420669 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.420815 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.420916 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.421092 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.421225 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.437188 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.551241 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/197e738b-95d3-4250-b16a-e70331f46ba5-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"197e738b-95d3-4250-b16a-e70331f46ba5\") " pod="openstack/rabbitmq-server-0" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.551320 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/197e738b-95d3-4250-b16a-e70331f46ba5-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"197e738b-95d3-4250-b16a-e70331f46ba5\") " pod="openstack/rabbitmq-server-0" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.551371 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/197e738b-95d3-4250-b16a-e70331f46ba5-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"197e738b-95d3-4250-b16a-e70331f46ba5\") " pod="openstack/rabbitmq-server-0" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.551395 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/197e738b-95d3-4250-b16a-e70331f46ba5-pod-info\") pod \"rabbitmq-server-0\" (UID: \"197e738b-95d3-4250-b16a-e70331f46ba5\") " pod="openstack/rabbitmq-server-0" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.551455 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/197e738b-95d3-4250-b16a-e70331f46ba5-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"197e738b-95d3-4250-b16a-e70331f46ba5\") " pod="openstack/rabbitmq-server-0" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.551483 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: 
\"kubernetes.io/configmap/197e738b-95d3-4250-b16a-e70331f46ba5-server-conf\") pod \"rabbitmq-server-0\" (UID: \"197e738b-95d3-4250-b16a-e70331f46ba5\") " pod="openstack/rabbitmq-server-0" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.551599 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5rlm2\" (UniqueName: \"kubernetes.io/projected/197e738b-95d3-4250-b16a-e70331f46ba5-kube-api-access-5rlm2\") pod \"rabbitmq-server-0\" (UID: \"197e738b-95d3-4250-b16a-e70331f46ba5\") " pod="openstack/rabbitmq-server-0" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.551626 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/197e738b-95d3-4250-b16a-e70331f46ba5-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"197e738b-95d3-4250-b16a-e70331f46ba5\") " pod="openstack/rabbitmq-server-0" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.551706 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/197e738b-95d3-4250-b16a-e70331f46ba5-config-data\") pod \"rabbitmq-server-0\" (UID: \"197e738b-95d3-4250-b16a-e70331f46ba5\") " pod="openstack/rabbitmq-server-0" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.551750 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-7dd44334-132b-482c-9476-999deb6f1264\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7dd44334-132b-482c-9476-999deb6f1264\") pod \"rabbitmq-server-0\" (UID: \"197e738b-95d3-4250-b16a-e70331f46ba5\") " pod="openstack/rabbitmq-server-0" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.551837 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/197e738b-95d3-4250-b16a-e70331f46ba5-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"197e738b-95d3-4250-b16a-e70331f46ba5\") " pod="openstack/rabbitmq-server-0" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.653314 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/197e738b-95d3-4250-b16a-e70331f46ba5-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"197e738b-95d3-4250-b16a-e70331f46ba5\") " pod="openstack/rabbitmq-server-0" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.653367 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/197e738b-95d3-4250-b16a-e70331f46ba5-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"197e738b-95d3-4250-b16a-e70331f46ba5\") " pod="openstack/rabbitmq-server-0" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.653389 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/197e738b-95d3-4250-b16a-e70331f46ba5-pod-info\") pod \"rabbitmq-server-0\" (UID: \"197e738b-95d3-4250-b16a-e70331f46ba5\") " pod="openstack/rabbitmq-server-0" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.653442 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/197e738b-95d3-4250-b16a-e70331f46ba5-rabbitmq-confd\") pod 
\"rabbitmq-server-0\" (UID: \"197e738b-95d3-4250-b16a-e70331f46ba5\") " pod="openstack/rabbitmq-server-0" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.653480 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/197e738b-95d3-4250-b16a-e70331f46ba5-server-conf\") pod \"rabbitmq-server-0\" (UID: \"197e738b-95d3-4250-b16a-e70331f46ba5\") " pod="openstack/rabbitmq-server-0" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.653547 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5rlm2\" (UniqueName: \"kubernetes.io/projected/197e738b-95d3-4250-b16a-e70331f46ba5-kube-api-access-5rlm2\") pod \"rabbitmq-server-0\" (UID: \"197e738b-95d3-4250-b16a-e70331f46ba5\") " pod="openstack/rabbitmq-server-0" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.653564 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/197e738b-95d3-4250-b16a-e70331f46ba5-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"197e738b-95d3-4250-b16a-e70331f46ba5\") " pod="openstack/rabbitmq-server-0" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.653620 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/197e738b-95d3-4250-b16a-e70331f46ba5-config-data\") pod \"rabbitmq-server-0\" (UID: \"197e738b-95d3-4250-b16a-e70331f46ba5\") " pod="openstack/rabbitmq-server-0" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.653655 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-7dd44334-132b-482c-9476-999deb6f1264\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7dd44334-132b-482c-9476-999deb6f1264\") pod \"rabbitmq-server-0\" (UID: \"197e738b-95d3-4250-b16a-e70331f46ba5\") " pod="openstack/rabbitmq-server-0" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.653721 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/197e738b-95d3-4250-b16a-e70331f46ba5-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"197e738b-95d3-4250-b16a-e70331f46ba5\") " pod="openstack/rabbitmq-server-0" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.653819 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/197e738b-95d3-4250-b16a-e70331f46ba5-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"197e738b-95d3-4250-b16a-e70331f46ba5\") " pod="openstack/rabbitmq-server-0" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.655126 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/197e738b-95d3-4250-b16a-e70331f46ba5-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"197e738b-95d3-4250-b16a-e70331f46ba5\") " pod="openstack/rabbitmq-server-0" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.655312 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/197e738b-95d3-4250-b16a-e70331f46ba5-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"197e738b-95d3-4250-b16a-e70331f46ba5\") " pod="openstack/rabbitmq-server-0" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.656245 4792 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/197e738b-95d3-4250-b16a-e70331f46ba5-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"197e738b-95d3-4250-b16a-e70331f46ba5\") " pod="openstack/rabbitmq-server-0" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.656374 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/197e738b-95d3-4250-b16a-e70331f46ba5-config-data\") pod \"rabbitmq-server-0\" (UID: \"197e738b-95d3-4250-b16a-e70331f46ba5\") " pod="openstack/rabbitmq-server-0" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.657160 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/197e738b-95d3-4250-b16a-e70331f46ba5-server-conf\") pod \"rabbitmq-server-0\" (UID: \"197e738b-95d3-4250-b16a-e70331f46ba5\") " pod="openstack/rabbitmq-server-0" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.657688 4792 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.657822 4792 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-7dd44334-132b-482c-9476-999deb6f1264\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7dd44334-132b-482c-9476-999deb6f1264\") pod \"rabbitmq-server-0\" (UID: \"197e738b-95d3-4250-b16a-e70331f46ba5\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/6f7cf48cd4718bf0e3e1786efa8d94ee11134172315a8e6b7e6f42a5f9d770d6/globalmount\"" pod="openstack/rabbitmq-server-0" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.658727 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/197e738b-95d3-4250-b16a-e70331f46ba5-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"197e738b-95d3-4250-b16a-e70331f46ba5\") " pod="openstack/rabbitmq-server-0" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.659076 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/197e738b-95d3-4250-b16a-e70331f46ba5-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"197e738b-95d3-4250-b16a-e70331f46ba5\") " pod="openstack/rabbitmq-server-0" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.659320 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/197e738b-95d3-4250-b16a-e70331f46ba5-pod-info\") pod \"rabbitmq-server-0\" (UID: \"197e738b-95d3-4250-b16a-e70331f46ba5\") " pod="openstack/rabbitmq-server-0" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.661072 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/197e738b-95d3-4250-b16a-e70331f46ba5-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"197e738b-95d3-4250-b16a-e70331f46ba5\") " pod="openstack/rabbitmq-server-0" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.675220 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5rlm2\" (UniqueName: \"kubernetes.io/projected/197e738b-95d3-4250-b16a-e70331f46ba5-kube-api-access-5rlm2\") pod \"rabbitmq-server-0\" (UID: \"197e738b-95d3-4250-b16a-e70331f46ba5\") " 
pod="openstack/rabbitmq-server-0" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.712413 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-7dd44334-132b-482c-9476-999deb6f1264\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7dd44334-132b-482c-9476-999deb6f1264\") pod \"rabbitmq-server-0\" (UID: \"197e738b-95d3-4250-b16a-e70331f46ba5\") " pod="openstack/rabbitmq-server-0" Dec 02 19:02:14 crc kubenswrapper[4792]: I1202 19:02:14.749098 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 02 19:02:15 crc kubenswrapper[4792]: I1202 19:02:15.179807 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c43c55d9-74e9-4158-a193-ee8ead807ad7","Type":"ContainerStarted","Data":"8e253acc8470878f4f952c7f2575a6d82ee667da15e0e23bd4a875e1ab498fe3"} Dec 02 19:02:15 crc kubenswrapper[4792]: I1202 19:02:15.186231 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-dbb88bf8c-qknx2" event={"ID":"a3cc6d17-81b1-431b-8af5-3fc48a65dc37","Type":"ContainerStarted","Data":"ab2b5633405cead41e2b55d9f149b430f0281e4ed51a6b54c58884baa8ffadc5"} Dec 02 19:02:15 crc kubenswrapper[4792]: I1202 19:02:15.187767 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-dbb88bf8c-qknx2" Dec 02 19:02:15 crc kubenswrapper[4792]: I1202 19:02:15.210790 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 02 19:02:15 crc kubenswrapper[4792]: I1202 19:02:15.212196 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2","Type":"ContainerStarted","Data":"df1e6d8aea0d229898588f728dd5ea430fe36eeb8654017418c5f1d5efd73433"} Dec 02 19:02:15 crc kubenswrapper[4792]: W1202 19:02:15.215019 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod197e738b_95d3_4250_b16a_e70331f46ba5.slice/crio-26abd61ad6423d5bd4ec60ff55d09f1ce510a866f7a4b023b1202605cba789e7 WatchSource:0}: Error finding container 26abd61ad6423d5bd4ec60ff55d09f1ce510a866f7a4b023b1202605cba789e7: Status 404 returned error can't find the container with id 26abd61ad6423d5bd4ec60ff55d09f1ce510a866f7a4b023b1202605cba789e7 Dec 02 19:02:15 crc kubenswrapper[4792]: I1202 19:02:15.222506 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-dbb88bf8c-qknx2" podStartSLOduration=9.222486491 podStartE2EDuration="9.222486491s" podCreationTimestamp="2025-12-02 19:02:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 19:02:15.20709441 +0000 UTC m=+1565.979986758" watchObservedRunningTime="2025-12-02 19:02:15.222486491 +0000 UTC m=+1565.995378819" Dec 02 19:02:15 crc kubenswrapper[4792]: I1202 19:02:15.552697 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba" path="/var/lib/kubelet/pods/c7022cd7-3ce3-4e20-b067-5bdb3a39a0ba/volumes" Dec 02 19:02:16 crc kubenswrapper[4792]: I1202 19:02:16.240184 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2","Type":"ContainerStarted","Data":"7bc1f05a7d253e9bfb13fd557ba7ff6d586eab551fcf8ff7155980319ea677a0"} Dec 02 19:02:16 crc 
kubenswrapper[4792]: I1202 19:02:16.242853 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"197e738b-95d3-4250-b16a-e70331f46ba5","Type":"ContainerStarted","Data":"26abd61ad6423d5bd4ec60ff55d09f1ce510a866f7a4b023b1202605cba789e7"} Dec 02 19:02:16 crc kubenswrapper[4792]: I1202 19:02:16.245655 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c43c55d9-74e9-4158-a193-ee8ead807ad7","Type":"ContainerStarted","Data":"eb341509a5e5e14b7592bba8d0f7320ab6061c13d411e7bf78c7f5d1fd729108"} Dec 02 19:02:17 crc kubenswrapper[4792]: I1202 19:02:17.295122 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2","Type":"ContainerStarted","Data":"ff06eda1aca895699e56c2fa531537f9fba68c148fd2c252596892f0ec49955f"} Dec 02 19:02:17 crc kubenswrapper[4792]: I1202 19:02:17.347598 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.771990364 podStartE2EDuration="20.347565979s" podCreationTimestamp="2025-12-02 19:01:57 +0000 UTC" firstStartedPulling="2025-12-02 19:01:58.84812653 +0000 UTC m=+1549.621018858" lastFinishedPulling="2025-12-02 19:02:16.423702145 +0000 UTC m=+1567.196594473" observedRunningTime="2025-12-02 19:02:17.330480484 +0000 UTC m=+1568.103372822" watchObservedRunningTime="2025-12-02 19:02:17.347565979 +0000 UTC m=+1568.120458317" Dec 02 19:02:18 crc kubenswrapper[4792]: I1202 19:02:18.311486 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"197e738b-95d3-4250-b16a-e70331f46ba5","Type":"ContainerStarted","Data":"8101c4a85920cba2963f7f4702b50ed24cbc7f9ba3d1f986b2bf1b73be9826af"} Dec 02 19:02:18 crc kubenswrapper[4792]: I1202 19:02:18.311877 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 02 19:02:22 crc kubenswrapper[4792]: I1202 19:02:22.192661 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-dbb88bf8c-qknx2" Dec 02 19:02:22 crc kubenswrapper[4792]: I1202 19:02:22.269472 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5fd9b586ff-w5ctg"] Dec 02 19:02:22 crc kubenswrapper[4792]: I1202 19:02:22.269748 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5fd9b586ff-w5ctg" podUID="9b00586f-ffa2-4617-abe4-56758a897416" containerName="dnsmasq-dns" containerID="cri-o://e0a4da9dd237407a1220dccdffad369d7cc795d3366e0ddf10644b20776095fa" gracePeriod=10 Dec 02 19:02:22 crc kubenswrapper[4792]: I1202 19:02:22.450028 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-85f64749dc-lrjg9"] Dec 02 19:02:22 crc kubenswrapper[4792]: I1202 19:02:22.452553 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-85f64749dc-lrjg9" Dec 02 19:02:22 crc kubenswrapper[4792]: I1202 19:02:22.473104 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-85f64749dc-lrjg9"] Dec 02 19:02:22 crc kubenswrapper[4792]: I1202 19:02:22.585032 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25-dns-swift-storage-0\") pod \"dnsmasq-dns-85f64749dc-lrjg9\" (UID: \"c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25\") " pod="openstack/dnsmasq-dns-85f64749dc-lrjg9" Dec 02 19:02:22 crc kubenswrapper[4792]: I1202 19:02:22.585128 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25-ovsdbserver-nb\") pod \"dnsmasq-dns-85f64749dc-lrjg9\" (UID: \"c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25\") " pod="openstack/dnsmasq-dns-85f64749dc-lrjg9" Dec 02 19:02:22 crc kubenswrapper[4792]: I1202 19:02:22.585161 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25-dns-svc\") pod \"dnsmasq-dns-85f64749dc-lrjg9\" (UID: \"c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25\") " pod="openstack/dnsmasq-dns-85f64749dc-lrjg9" Dec 02 19:02:22 crc kubenswrapper[4792]: I1202 19:02:22.585179 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25-ovsdbserver-sb\") pod \"dnsmasq-dns-85f64749dc-lrjg9\" (UID: \"c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25\") " pod="openstack/dnsmasq-dns-85f64749dc-lrjg9" Dec 02 19:02:22 crc kubenswrapper[4792]: I1202 19:02:22.585197 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25-config\") pod \"dnsmasq-dns-85f64749dc-lrjg9\" (UID: \"c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25\") " pod="openstack/dnsmasq-dns-85f64749dc-lrjg9" Dec 02 19:02:22 crc kubenswrapper[4792]: I1202 19:02:22.585232 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25-openstack-edpm-ipam\") pod \"dnsmasq-dns-85f64749dc-lrjg9\" (UID: \"c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25\") " pod="openstack/dnsmasq-dns-85f64749dc-lrjg9" Dec 02 19:02:22 crc kubenswrapper[4792]: I1202 19:02:22.585260 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bfhgs\" (UniqueName: \"kubernetes.io/projected/c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25-kube-api-access-bfhgs\") pod \"dnsmasq-dns-85f64749dc-lrjg9\" (UID: \"c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25\") " pod="openstack/dnsmasq-dns-85f64749dc-lrjg9" Dec 02 19:02:22 crc kubenswrapper[4792]: I1202 19:02:22.687560 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25-ovsdbserver-nb\") pod \"dnsmasq-dns-85f64749dc-lrjg9\" (UID: \"c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25\") " pod="openstack/dnsmasq-dns-85f64749dc-lrjg9" Dec 02 19:02:22 crc kubenswrapper[4792]: I1202 19:02:22.687618 4792 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25-dns-svc\") pod \"dnsmasq-dns-85f64749dc-lrjg9\" (UID: \"c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25\") " pod="openstack/dnsmasq-dns-85f64749dc-lrjg9" Dec 02 19:02:22 crc kubenswrapper[4792]: I1202 19:02:22.687645 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25-ovsdbserver-sb\") pod \"dnsmasq-dns-85f64749dc-lrjg9\" (UID: \"c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25\") " pod="openstack/dnsmasq-dns-85f64749dc-lrjg9" Dec 02 19:02:22 crc kubenswrapper[4792]: I1202 19:02:22.687667 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25-config\") pod \"dnsmasq-dns-85f64749dc-lrjg9\" (UID: \"c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25\") " pod="openstack/dnsmasq-dns-85f64749dc-lrjg9" Dec 02 19:02:22 crc kubenswrapper[4792]: I1202 19:02:22.687727 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25-openstack-edpm-ipam\") pod \"dnsmasq-dns-85f64749dc-lrjg9\" (UID: \"c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25\") " pod="openstack/dnsmasq-dns-85f64749dc-lrjg9" Dec 02 19:02:22 crc kubenswrapper[4792]: I1202 19:02:22.687751 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bfhgs\" (UniqueName: \"kubernetes.io/projected/c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25-kube-api-access-bfhgs\") pod \"dnsmasq-dns-85f64749dc-lrjg9\" (UID: \"c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25\") " pod="openstack/dnsmasq-dns-85f64749dc-lrjg9" Dec 02 19:02:22 crc kubenswrapper[4792]: I1202 19:02:22.687882 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25-dns-swift-storage-0\") pod \"dnsmasq-dns-85f64749dc-lrjg9\" (UID: \"c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25\") " pod="openstack/dnsmasq-dns-85f64749dc-lrjg9" Dec 02 19:02:22 crc kubenswrapper[4792]: I1202 19:02:22.688644 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25-ovsdbserver-nb\") pod \"dnsmasq-dns-85f64749dc-lrjg9\" (UID: \"c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25\") " pod="openstack/dnsmasq-dns-85f64749dc-lrjg9" Dec 02 19:02:22 crc kubenswrapper[4792]: I1202 19:02:22.689034 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25-ovsdbserver-sb\") pod \"dnsmasq-dns-85f64749dc-lrjg9\" (UID: \"c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25\") " pod="openstack/dnsmasq-dns-85f64749dc-lrjg9" Dec 02 19:02:22 crc kubenswrapper[4792]: I1202 19:02:22.689220 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25-config\") pod \"dnsmasq-dns-85f64749dc-lrjg9\" (UID: \"c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25\") " pod="openstack/dnsmasq-dns-85f64749dc-lrjg9" Dec 02 19:02:22 crc kubenswrapper[4792]: I1202 19:02:22.689428 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25-openstack-edpm-ipam\") pod \"dnsmasq-dns-85f64749dc-lrjg9\" (UID: \"c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25\") " pod="openstack/dnsmasq-dns-85f64749dc-lrjg9" Dec 02 19:02:22 crc kubenswrapper[4792]: I1202 19:02:22.689628 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25-dns-swift-storage-0\") pod \"dnsmasq-dns-85f64749dc-lrjg9\" (UID: \"c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25\") " pod="openstack/dnsmasq-dns-85f64749dc-lrjg9" Dec 02 19:02:22 crc kubenswrapper[4792]: I1202 19:02:22.689912 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25-dns-svc\") pod \"dnsmasq-dns-85f64749dc-lrjg9\" (UID: \"c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25\") " pod="openstack/dnsmasq-dns-85f64749dc-lrjg9" Dec 02 19:02:22 crc kubenswrapper[4792]: I1202 19:02:22.708455 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bfhgs\" (UniqueName: \"kubernetes.io/projected/c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25-kube-api-access-bfhgs\") pod \"dnsmasq-dns-85f64749dc-lrjg9\" (UID: \"c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25\") " pod="openstack/dnsmasq-dns-85f64749dc-lrjg9" Dec 02 19:02:22 crc kubenswrapper[4792]: I1202 19:02:22.770760 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85f64749dc-lrjg9" Dec 02 19:02:22 crc kubenswrapper[4792]: I1202 19:02:22.899816 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5fd9b586ff-w5ctg" Dec 02 19:02:22 crc kubenswrapper[4792]: I1202 19:02:22.992937 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tzk2j\" (UniqueName: \"kubernetes.io/projected/9b00586f-ffa2-4617-abe4-56758a897416-kube-api-access-tzk2j\") pod \"9b00586f-ffa2-4617-abe4-56758a897416\" (UID: \"9b00586f-ffa2-4617-abe4-56758a897416\") " Dec 02 19:02:22 crc kubenswrapper[4792]: I1202 19:02:22.993034 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9b00586f-ffa2-4617-abe4-56758a897416-ovsdbserver-sb\") pod \"9b00586f-ffa2-4617-abe4-56758a897416\" (UID: \"9b00586f-ffa2-4617-abe4-56758a897416\") " Dec 02 19:02:22 crc kubenswrapper[4792]: I1202 19:02:22.993158 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9b00586f-ffa2-4617-abe4-56758a897416-config\") pod \"9b00586f-ffa2-4617-abe4-56758a897416\" (UID: \"9b00586f-ffa2-4617-abe4-56758a897416\") " Dec 02 19:02:22 crc kubenswrapper[4792]: I1202 19:02:22.993285 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9b00586f-ffa2-4617-abe4-56758a897416-dns-swift-storage-0\") pod \"9b00586f-ffa2-4617-abe4-56758a897416\" (UID: \"9b00586f-ffa2-4617-abe4-56758a897416\") " Dec 02 19:02:22 crc kubenswrapper[4792]: I1202 19:02:22.993376 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9b00586f-ffa2-4617-abe4-56758a897416-dns-svc\") pod \"9b00586f-ffa2-4617-abe4-56758a897416\" (UID: \"9b00586f-ffa2-4617-abe4-56758a897416\") " Dec 02 
19:02:22 crc kubenswrapper[4792]: I1202 19:02:22.993448 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9b00586f-ffa2-4617-abe4-56758a897416-ovsdbserver-nb\") pod \"9b00586f-ffa2-4617-abe4-56758a897416\" (UID: \"9b00586f-ffa2-4617-abe4-56758a897416\") " Dec 02 19:02:22 crc kubenswrapper[4792]: I1202 19:02:22.997751 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b00586f-ffa2-4617-abe4-56758a897416-kube-api-access-tzk2j" (OuterVolumeSpecName: "kube-api-access-tzk2j") pod "9b00586f-ffa2-4617-abe4-56758a897416" (UID: "9b00586f-ffa2-4617-abe4-56758a897416"). InnerVolumeSpecName "kube-api-access-tzk2j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:02:23 crc kubenswrapper[4792]: I1202 19:02:23.051951 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9b00586f-ffa2-4617-abe4-56758a897416-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "9b00586f-ffa2-4617-abe4-56758a897416" (UID: "9b00586f-ffa2-4617-abe4-56758a897416"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 19:02:23 crc kubenswrapper[4792]: I1202 19:02:23.054204 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9b00586f-ffa2-4617-abe4-56758a897416-config" (OuterVolumeSpecName: "config") pod "9b00586f-ffa2-4617-abe4-56758a897416" (UID: "9b00586f-ffa2-4617-abe4-56758a897416"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 19:02:23 crc kubenswrapper[4792]: I1202 19:02:23.069503 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9b00586f-ffa2-4617-abe4-56758a897416-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9b00586f-ffa2-4617-abe4-56758a897416" (UID: "9b00586f-ffa2-4617-abe4-56758a897416"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 19:02:23 crc kubenswrapper[4792]: I1202 19:02:23.081845 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9b00586f-ffa2-4617-abe4-56758a897416-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "9b00586f-ffa2-4617-abe4-56758a897416" (UID: "9b00586f-ffa2-4617-abe4-56758a897416"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 19:02:23 crc kubenswrapper[4792]: I1202 19:02:23.086199 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9b00586f-ffa2-4617-abe4-56758a897416-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9b00586f-ffa2-4617-abe4-56758a897416" (UID: "9b00586f-ffa2-4617-abe4-56758a897416"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 19:02:23 crc kubenswrapper[4792]: I1202 19:02:23.109489 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9b00586f-ffa2-4617-abe4-56758a897416-config\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:23 crc kubenswrapper[4792]: I1202 19:02:23.109535 4792 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9b00586f-ffa2-4617-abe4-56758a897416-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:23 crc kubenswrapper[4792]: I1202 19:02:23.109547 4792 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9b00586f-ffa2-4617-abe4-56758a897416-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:23 crc kubenswrapper[4792]: I1202 19:02:23.109558 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9b00586f-ffa2-4617-abe4-56758a897416-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:23 crc kubenswrapper[4792]: I1202 19:02:23.109566 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tzk2j\" (UniqueName: \"kubernetes.io/projected/9b00586f-ffa2-4617-abe4-56758a897416-kube-api-access-tzk2j\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:23 crc kubenswrapper[4792]: I1202 19:02:23.109574 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9b00586f-ffa2-4617-abe4-56758a897416-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:23 crc kubenswrapper[4792]: I1202 19:02:23.267017 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-85f64749dc-lrjg9"] Dec 02 19:02:23 crc kubenswrapper[4792]: I1202 19:02:23.400503 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85f64749dc-lrjg9" event={"ID":"c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25","Type":"ContainerStarted","Data":"708646862a3064a6f4623b06ddea64f431e9f9757ab22096fae4d90b2bd1e1f7"} Dec 02 19:02:23 crc kubenswrapper[4792]: I1202 19:02:23.402925 4792 generic.go:334] "Generic (PLEG): container finished" podID="9b00586f-ffa2-4617-abe4-56758a897416" containerID="e0a4da9dd237407a1220dccdffad369d7cc795d3366e0ddf10644b20776095fa" exitCode=0 Dec 02 19:02:23 crc kubenswrapper[4792]: I1202 19:02:23.402953 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5fd9b586ff-w5ctg" event={"ID":"9b00586f-ffa2-4617-abe4-56758a897416","Type":"ContainerDied","Data":"e0a4da9dd237407a1220dccdffad369d7cc795d3366e0ddf10644b20776095fa"} Dec 02 19:02:23 crc kubenswrapper[4792]: I1202 19:02:23.402972 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5fd9b586ff-w5ctg" event={"ID":"9b00586f-ffa2-4617-abe4-56758a897416","Type":"ContainerDied","Data":"730cef0ff54a43e4e61bde2ddaac6da0e4af57635d63b5b2b70f1cf2fcc31646"} Dec 02 19:02:23 crc kubenswrapper[4792]: I1202 19:02:23.402988 4792 scope.go:117] "RemoveContainer" containerID="e0a4da9dd237407a1220dccdffad369d7cc795d3366e0ddf10644b20776095fa" Dec 02 19:02:23 crc kubenswrapper[4792]: I1202 19:02:23.403018 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5fd9b586ff-w5ctg" Dec 02 19:02:23 crc kubenswrapper[4792]: I1202 19:02:23.502136 4792 scope.go:117] "RemoveContainer" containerID="6306566cd0ac8b02100c06e10f85509efaa7fa4cb493fecb2da843b53293628a" Dec 02 19:02:23 crc kubenswrapper[4792]: I1202 19:02:23.526695 4792 scope.go:117] "RemoveContainer" containerID="e0a4da9dd237407a1220dccdffad369d7cc795d3366e0ddf10644b20776095fa" Dec 02 19:02:23 crc kubenswrapper[4792]: E1202 19:02:23.527233 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e0a4da9dd237407a1220dccdffad369d7cc795d3366e0ddf10644b20776095fa\": container with ID starting with e0a4da9dd237407a1220dccdffad369d7cc795d3366e0ddf10644b20776095fa not found: ID does not exist" containerID="e0a4da9dd237407a1220dccdffad369d7cc795d3366e0ddf10644b20776095fa" Dec 02 19:02:23 crc kubenswrapper[4792]: I1202 19:02:23.527275 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0a4da9dd237407a1220dccdffad369d7cc795d3366e0ddf10644b20776095fa"} err="failed to get container status \"e0a4da9dd237407a1220dccdffad369d7cc795d3366e0ddf10644b20776095fa\": rpc error: code = NotFound desc = could not find container \"e0a4da9dd237407a1220dccdffad369d7cc795d3366e0ddf10644b20776095fa\": container with ID starting with e0a4da9dd237407a1220dccdffad369d7cc795d3366e0ddf10644b20776095fa not found: ID does not exist" Dec 02 19:02:23 crc kubenswrapper[4792]: I1202 19:02:23.527303 4792 scope.go:117] "RemoveContainer" containerID="6306566cd0ac8b02100c06e10f85509efaa7fa4cb493fecb2da843b53293628a" Dec 02 19:02:23 crc kubenswrapper[4792]: E1202 19:02:23.527765 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6306566cd0ac8b02100c06e10f85509efaa7fa4cb493fecb2da843b53293628a\": container with ID starting with 6306566cd0ac8b02100c06e10f85509efaa7fa4cb493fecb2da843b53293628a not found: ID does not exist" containerID="6306566cd0ac8b02100c06e10f85509efaa7fa4cb493fecb2da843b53293628a" Dec 02 19:02:23 crc kubenswrapper[4792]: I1202 19:02:23.527805 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6306566cd0ac8b02100c06e10f85509efaa7fa4cb493fecb2da843b53293628a"} err="failed to get container status \"6306566cd0ac8b02100c06e10f85509efaa7fa4cb493fecb2da843b53293628a\": rpc error: code = NotFound desc = could not find container \"6306566cd0ac8b02100c06e10f85509efaa7fa4cb493fecb2da843b53293628a\": container with ID starting with 6306566cd0ac8b02100c06e10f85509efaa7fa4cb493fecb2da843b53293628a not found: ID does not exist" Dec 02 19:02:23 crc kubenswrapper[4792]: I1202 19:02:23.529363 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5fd9b586ff-w5ctg"] Dec 02 19:02:23 crc kubenswrapper[4792]: I1202 19:02:23.537719 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5fd9b586ff-w5ctg"] Dec 02 19:02:23 crc kubenswrapper[4792]: I1202 19:02:23.540384 4792 scope.go:117] "RemoveContainer" containerID="1bd3680d8cabbe09313bc8e9bfcfc7ae4c8aef120bcf8851a5536ed5b3144523" Dec 02 19:02:23 crc kubenswrapper[4792]: E1202 19:02:23.540674 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:02:23 crc kubenswrapper[4792]: I1202 19:02:23.553683 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9b00586f-ffa2-4617-abe4-56758a897416" path="/var/lib/kubelet/pods/9b00586f-ffa2-4617-abe4-56758a897416/volumes" Dec 02 19:02:24 crc kubenswrapper[4792]: I1202 19:02:24.421090 4792 generic.go:334] "Generic (PLEG): container finished" podID="c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25" containerID="717c5acac487d623e5f5777490b6a362e54c21f5e3a16d28e6edcb200b910283" exitCode=0 Dec 02 19:02:24 crc kubenswrapper[4792]: I1202 19:02:24.421335 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85f64749dc-lrjg9" event={"ID":"c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25","Type":"ContainerDied","Data":"717c5acac487d623e5f5777490b6a362e54c21f5e3a16d28e6edcb200b910283"} Dec 02 19:02:25 crc kubenswrapper[4792]: I1202 19:02:25.433749 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85f64749dc-lrjg9" event={"ID":"c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25","Type":"ContainerStarted","Data":"18852538602e26d2d8412097285b769bd3d68709eebf287966668d2e2c300b4a"} Dec 02 19:02:25 crc kubenswrapper[4792]: I1202 19:02:25.435000 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-85f64749dc-lrjg9" Dec 02 19:02:25 crc kubenswrapper[4792]: I1202 19:02:25.465573 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-85f64749dc-lrjg9" podStartSLOduration=3.4655511580000002 podStartE2EDuration="3.465551158s" podCreationTimestamp="2025-12-02 19:02:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 19:02:25.452812847 +0000 UTC m=+1576.225705185" watchObservedRunningTime="2025-12-02 19:02:25.465551158 +0000 UTC m=+1576.238443496" Dec 02 19:02:25 crc kubenswrapper[4792]: I1202 19:02:25.742657 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 02 19:02:26 crc kubenswrapper[4792]: I1202 19:02:26.443830 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-vj9gg" event={"ID":"809cb60d-1a06-4216-8f74-882aa0d7470e","Type":"ContainerStarted","Data":"f58d2390289bd232c0a6a7727eb8a697e0c2173db747207551bb11038525df1a"} Dec 02 19:02:26 crc kubenswrapper[4792]: I1202 19:02:26.459748 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-db-sync-vj9gg" podStartSLOduration=2.5348321929999997 podStartE2EDuration="35.459731942s" podCreationTimestamp="2025-12-02 19:01:51 +0000 UTC" firstStartedPulling="2025-12-02 19:01:52.808962185 +0000 UTC m=+1543.581854523" lastFinishedPulling="2025-12-02 19:02:25.733861934 +0000 UTC m=+1576.506754272" observedRunningTime="2025-12-02 19:02:26.459033974 +0000 UTC m=+1577.231926292" watchObservedRunningTime="2025-12-02 19:02:26.459731942 +0000 UTC m=+1577.232624270" Dec 02 19:02:28 crc kubenswrapper[4792]: I1202 19:02:28.338240 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 02 19:02:28 crc kubenswrapper[4792]: I1202 19:02:28.470331 4792 generic.go:334] "Generic (PLEG): container finished" podID="809cb60d-1a06-4216-8f74-882aa0d7470e" 
containerID="f58d2390289bd232c0a6a7727eb8a697e0c2173db747207551bb11038525df1a" exitCode=0 Dec 02 19:02:28 crc kubenswrapper[4792]: I1202 19:02:28.470381 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-vj9gg" event={"ID":"809cb60d-1a06-4216-8f74-882aa0d7470e","Type":"ContainerDied","Data":"f58d2390289bd232c0a6a7727eb8a697e0c2173db747207551bb11038525df1a"} Dec 02 19:02:29 crc kubenswrapper[4792]: I1202 19:02:29.981685 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-sync-vj9gg" Dec 02 19:02:30 crc kubenswrapper[4792]: I1202 19:02:30.062637 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/809cb60d-1a06-4216-8f74-882aa0d7470e-combined-ca-bundle\") pod \"809cb60d-1a06-4216-8f74-882aa0d7470e\" (UID: \"809cb60d-1a06-4216-8f74-882aa0d7470e\") " Dec 02 19:02:30 crc kubenswrapper[4792]: I1202 19:02:30.062714 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/809cb60d-1a06-4216-8f74-882aa0d7470e-certs\") pod \"809cb60d-1a06-4216-8f74-882aa0d7470e\" (UID: \"809cb60d-1a06-4216-8f74-882aa0d7470e\") " Dec 02 19:02:30 crc kubenswrapper[4792]: I1202 19:02:30.062862 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/809cb60d-1a06-4216-8f74-882aa0d7470e-config-data\") pod \"809cb60d-1a06-4216-8f74-882aa0d7470e\" (UID: \"809cb60d-1a06-4216-8f74-882aa0d7470e\") " Dec 02 19:02:30 crc kubenswrapper[4792]: I1202 19:02:30.062941 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/809cb60d-1a06-4216-8f74-882aa0d7470e-scripts\") pod \"809cb60d-1a06-4216-8f74-882aa0d7470e\" (UID: \"809cb60d-1a06-4216-8f74-882aa0d7470e\") " Dec 02 19:02:30 crc kubenswrapper[4792]: I1202 19:02:30.062991 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2v9kw\" (UniqueName: \"kubernetes.io/projected/809cb60d-1a06-4216-8f74-882aa0d7470e-kube-api-access-2v9kw\") pod \"809cb60d-1a06-4216-8f74-882aa0d7470e\" (UID: \"809cb60d-1a06-4216-8f74-882aa0d7470e\") " Dec 02 19:02:30 crc kubenswrapper[4792]: I1202 19:02:30.069110 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/809cb60d-1a06-4216-8f74-882aa0d7470e-scripts" (OuterVolumeSpecName: "scripts") pod "809cb60d-1a06-4216-8f74-882aa0d7470e" (UID: "809cb60d-1a06-4216-8f74-882aa0d7470e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:02:30 crc kubenswrapper[4792]: I1202 19:02:30.080691 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/809cb60d-1a06-4216-8f74-882aa0d7470e-certs" (OuterVolumeSpecName: "certs") pod "809cb60d-1a06-4216-8f74-882aa0d7470e" (UID: "809cb60d-1a06-4216-8f74-882aa0d7470e"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:02:30 crc kubenswrapper[4792]: I1202 19:02:30.089150 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/809cb60d-1a06-4216-8f74-882aa0d7470e-kube-api-access-2v9kw" (OuterVolumeSpecName: "kube-api-access-2v9kw") pod "809cb60d-1a06-4216-8f74-882aa0d7470e" (UID: "809cb60d-1a06-4216-8f74-882aa0d7470e"). 
InnerVolumeSpecName "kube-api-access-2v9kw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:02:30 crc kubenswrapper[4792]: I1202 19:02:30.095194 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/809cb60d-1a06-4216-8f74-882aa0d7470e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "809cb60d-1a06-4216-8f74-882aa0d7470e" (UID: "809cb60d-1a06-4216-8f74-882aa0d7470e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:02:30 crc kubenswrapper[4792]: I1202 19:02:30.098310 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/809cb60d-1a06-4216-8f74-882aa0d7470e-config-data" (OuterVolumeSpecName: "config-data") pod "809cb60d-1a06-4216-8f74-882aa0d7470e" (UID: "809cb60d-1a06-4216-8f74-882aa0d7470e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:02:30 crc kubenswrapper[4792]: I1202 19:02:30.164907 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/809cb60d-1a06-4216-8f74-882aa0d7470e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:30 crc kubenswrapper[4792]: I1202 19:02:30.164938 4792 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/809cb60d-1a06-4216-8f74-882aa0d7470e-certs\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:30 crc kubenswrapper[4792]: I1202 19:02:30.164947 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/809cb60d-1a06-4216-8f74-882aa0d7470e-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:30 crc kubenswrapper[4792]: I1202 19:02:30.164957 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/809cb60d-1a06-4216-8f74-882aa0d7470e-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:30 crc kubenswrapper[4792]: I1202 19:02:30.164968 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2v9kw\" (UniqueName: \"kubernetes.io/projected/809cb60d-1a06-4216-8f74-882aa0d7470e-kube-api-access-2v9kw\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:30 crc kubenswrapper[4792]: I1202 19:02:30.496648 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-vj9gg" event={"ID":"809cb60d-1a06-4216-8f74-882aa0d7470e","Type":"ContainerDied","Data":"3390ce5a448f9f52e9e772accdd2e630587244614dccd4739f3e3dd13532f73c"} Dec 02 19:02:30 crc kubenswrapper[4792]: I1202 19:02:30.496682 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-db-sync-vj9gg" Dec 02 19:02:30 crc kubenswrapper[4792]: I1202 19:02:30.496690 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3390ce5a448f9f52e9e772accdd2e630587244614dccd4739f3e3dd13532f73c" Dec 02 19:02:30 crc kubenswrapper[4792]: I1202 19:02:30.651197 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-storageinit-sj984"] Dec 02 19:02:30 crc kubenswrapper[4792]: I1202 19:02:30.657136 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-storageinit-sj984"] Dec 02 19:02:30 crc kubenswrapper[4792]: I1202 19:02:30.723951 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-storageinit-c5g69"] Dec 02 19:02:30 crc kubenswrapper[4792]: E1202 19:02:30.724441 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="809cb60d-1a06-4216-8f74-882aa0d7470e" containerName="cloudkitty-db-sync" Dec 02 19:02:30 crc kubenswrapper[4792]: I1202 19:02:30.724458 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="809cb60d-1a06-4216-8f74-882aa0d7470e" containerName="cloudkitty-db-sync" Dec 02 19:02:30 crc kubenswrapper[4792]: E1202 19:02:30.724479 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b00586f-ffa2-4617-abe4-56758a897416" containerName="dnsmasq-dns" Dec 02 19:02:30 crc kubenswrapper[4792]: I1202 19:02:30.724485 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b00586f-ffa2-4617-abe4-56758a897416" containerName="dnsmasq-dns" Dec 02 19:02:30 crc kubenswrapper[4792]: E1202 19:02:30.724497 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b00586f-ffa2-4617-abe4-56758a897416" containerName="init" Dec 02 19:02:30 crc kubenswrapper[4792]: I1202 19:02:30.724507 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b00586f-ffa2-4617-abe4-56758a897416" containerName="init" Dec 02 19:02:30 crc kubenswrapper[4792]: I1202 19:02:30.724755 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b00586f-ffa2-4617-abe4-56758a897416" containerName="dnsmasq-dns" Dec 02 19:02:30 crc kubenswrapper[4792]: I1202 19:02:30.724765 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="809cb60d-1a06-4216-8f74-882aa0d7470e" containerName="cloudkitty-db-sync" Dec 02 19:02:30 crc kubenswrapper[4792]: I1202 19:02:30.725503 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-storageinit-c5g69" Dec 02 19:02:30 crc kubenswrapper[4792]: I1202 19:02:30.730168 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 02 19:02:30 crc kubenswrapper[4792]: I1202 19:02:30.734644 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-storageinit-c5g69"] Dec 02 19:02:30 crc kubenswrapper[4792]: I1202 19:02:30.879959 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb580c6b-0812-49a4-8c58-07c8162c2fe3-config-data\") pod \"cloudkitty-storageinit-c5g69\" (UID: \"eb580c6b-0812-49a4-8c58-07c8162c2fe3\") " pod="openstack/cloudkitty-storageinit-c5g69" Dec 02 19:02:30 crc kubenswrapper[4792]: I1202 19:02:30.880041 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wpdnd\" (UniqueName: \"kubernetes.io/projected/eb580c6b-0812-49a4-8c58-07c8162c2fe3-kube-api-access-wpdnd\") pod \"cloudkitty-storageinit-c5g69\" (UID: \"eb580c6b-0812-49a4-8c58-07c8162c2fe3\") " pod="openstack/cloudkitty-storageinit-c5g69" Dec 02 19:02:30 crc kubenswrapper[4792]: I1202 19:02:30.880069 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb580c6b-0812-49a4-8c58-07c8162c2fe3-combined-ca-bundle\") pod \"cloudkitty-storageinit-c5g69\" (UID: \"eb580c6b-0812-49a4-8c58-07c8162c2fe3\") " pod="openstack/cloudkitty-storageinit-c5g69" Dec 02 19:02:30 crc kubenswrapper[4792]: I1202 19:02:30.880124 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/eb580c6b-0812-49a4-8c58-07c8162c2fe3-certs\") pod \"cloudkitty-storageinit-c5g69\" (UID: \"eb580c6b-0812-49a4-8c58-07c8162c2fe3\") " pod="openstack/cloudkitty-storageinit-c5g69" Dec 02 19:02:30 crc kubenswrapper[4792]: I1202 19:02:30.880258 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb580c6b-0812-49a4-8c58-07c8162c2fe3-scripts\") pod \"cloudkitty-storageinit-c5g69\" (UID: \"eb580c6b-0812-49a4-8c58-07c8162c2fe3\") " pod="openstack/cloudkitty-storageinit-c5g69" Dec 02 19:02:30 crc kubenswrapper[4792]: I1202 19:02:30.981850 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb580c6b-0812-49a4-8c58-07c8162c2fe3-scripts\") pod \"cloudkitty-storageinit-c5g69\" (UID: \"eb580c6b-0812-49a4-8c58-07c8162c2fe3\") " pod="openstack/cloudkitty-storageinit-c5g69" Dec 02 19:02:30 crc kubenswrapper[4792]: I1202 19:02:30.981912 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb580c6b-0812-49a4-8c58-07c8162c2fe3-config-data\") pod \"cloudkitty-storageinit-c5g69\" (UID: \"eb580c6b-0812-49a4-8c58-07c8162c2fe3\") " pod="openstack/cloudkitty-storageinit-c5g69" Dec 02 19:02:30 crc kubenswrapper[4792]: I1202 19:02:30.981988 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wpdnd\" (UniqueName: \"kubernetes.io/projected/eb580c6b-0812-49a4-8c58-07c8162c2fe3-kube-api-access-wpdnd\") pod \"cloudkitty-storageinit-c5g69\" (UID: \"eb580c6b-0812-49a4-8c58-07c8162c2fe3\") " 
pod="openstack/cloudkitty-storageinit-c5g69" Dec 02 19:02:30 crc kubenswrapper[4792]: I1202 19:02:30.982013 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb580c6b-0812-49a4-8c58-07c8162c2fe3-combined-ca-bundle\") pod \"cloudkitty-storageinit-c5g69\" (UID: \"eb580c6b-0812-49a4-8c58-07c8162c2fe3\") " pod="openstack/cloudkitty-storageinit-c5g69" Dec 02 19:02:30 crc kubenswrapper[4792]: I1202 19:02:30.982044 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/eb580c6b-0812-49a4-8c58-07c8162c2fe3-certs\") pod \"cloudkitty-storageinit-c5g69\" (UID: \"eb580c6b-0812-49a4-8c58-07c8162c2fe3\") " pod="openstack/cloudkitty-storageinit-c5g69" Dec 02 19:02:30 crc kubenswrapper[4792]: I1202 19:02:30.989634 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb580c6b-0812-49a4-8c58-07c8162c2fe3-scripts\") pod \"cloudkitty-storageinit-c5g69\" (UID: \"eb580c6b-0812-49a4-8c58-07c8162c2fe3\") " pod="openstack/cloudkitty-storageinit-c5g69" Dec 02 19:02:30 crc kubenswrapper[4792]: I1202 19:02:30.989792 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb580c6b-0812-49a4-8c58-07c8162c2fe3-config-data\") pod \"cloudkitty-storageinit-c5g69\" (UID: \"eb580c6b-0812-49a4-8c58-07c8162c2fe3\") " pod="openstack/cloudkitty-storageinit-c5g69" Dec 02 19:02:30 crc kubenswrapper[4792]: I1202 19:02:30.994904 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/eb580c6b-0812-49a4-8c58-07c8162c2fe3-certs\") pod \"cloudkitty-storageinit-c5g69\" (UID: \"eb580c6b-0812-49a4-8c58-07c8162c2fe3\") " pod="openstack/cloudkitty-storageinit-c5g69" Dec 02 19:02:30 crc kubenswrapper[4792]: I1202 19:02:30.995463 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb580c6b-0812-49a4-8c58-07c8162c2fe3-combined-ca-bundle\") pod \"cloudkitty-storageinit-c5g69\" (UID: \"eb580c6b-0812-49a4-8c58-07c8162c2fe3\") " pod="openstack/cloudkitty-storageinit-c5g69" Dec 02 19:02:30 crc kubenswrapper[4792]: I1202 19:02:30.999145 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wpdnd\" (UniqueName: \"kubernetes.io/projected/eb580c6b-0812-49a4-8c58-07c8162c2fe3-kube-api-access-wpdnd\") pod \"cloudkitty-storageinit-c5g69\" (UID: \"eb580c6b-0812-49a4-8c58-07c8162c2fe3\") " pod="openstack/cloudkitty-storageinit-c5g69" Dec 02 19:02:31 crc kubenswrapper[4792]: I1202 19:02:31.041881 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-storageinit-c5g69" Dec 02 19:02:31 crc kubenswrapper[4792]: W1202 19:02:31.534311 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeb580c6b_0812_49a4_8c58_07c8162c2fe3.slice/crio-4bc43dea693ffc8ed3f48add63cbe4ff6c6895fd52fa20741b200a65eb29c98b WatchSource:0}: Error finding container 4bc43dea693ffc8ed3f48add63cbe4ff6c6895fd52fa20741b200a65eb29c98b: Status 404 returned error can't find the container with id 4bc43dea693ffc8ed3f48add63cbe4ff6c6895fd52fa20741b200a65eb29c98b Dec 02 19:02:31 crc kubenswrapper[4792]: I1202 19:02:31.536803 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-storageinit-c5g69"] Dec 02 19:02:31 crc kubenswrapper[4792]: I1202 19:02:31.550457 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="091d90c2-7a16-4474-8f4e-981342297fac" path="/var/lib/kubelet/pods/091d90c2-7a16-4474-8f4e-981342297fac/volumes" Dec 02 19:02:32 crc kubenswrapper[4792]: I1202 19:02:32.519942 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-c5g69" event={"ID":"eb580c6b-0812-49a4-8c58-07c8162c2fe3","Type":"ContainerStarted","Data":"9efa3d85c4e1e62d471e28a54d1341303660ad36d1805984d4d3cf4df36773ac"} Dec 02 19:02:32 crc kubenswrapper[4792]: I1202 19:02:32.520312 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-c5g69" event={"ID":"eb580c6b-0812-49a4-8c58-07c8162c2fe3","Type":"ContainerStarted","Data":"4bc43dea693ffc8ed3f48add63cbe4ff6c6895fd52fa20741b200a65eb29c98b"} Dec 02 19:02:32 crc kubenswrapper[4792]: I1202 19:02:32.553616 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-storageinit-c5g69" podStartSLOduration=2.553592172 podStartE2EDuration="2.553592172s" podCreationTimestamp="2025-12-02 19:02:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 19:02:32.545721597 +0000 UTC m=+1583.318613935" watchObservedRunningTime="2025-12-02 19:02:32.553592172 +0000 UTC m=+1583.326484520" Dec 02 19:02:32 crc kubenswrapper[4792]: I1202 19:02:32.772703 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-85f64749dc-lrjg9" Dec 02 19:02:32 crc kubenswrapper[4792]: I1202 19:02:32.865543 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-dbb88bf8c-qknx2"] Dec 02 19:02:32 crc kubenswrapper[4792]: I1202 19:02:32.866062 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-dbb88bf8c-qknx2" podUID="a3cc6d17-81b1-431b-8af5-3fc48a65dc37" containerName="dnsmasq-dns" containerID="cri-o://ab2b5633405cead41e2b55d9f149b430f0281e4ed51a6b54c58884baa8ffadc5" gracePeriod=10 Dec 02 19:02:33 crc kubenswrapper[4792]: I1202 19:02:33.530204 4792 generic.go:334] "Generic (PLEG): container finished" podID="eb580c6b-0812-49a4-8c58-07c8162c2fe3" containerID="9efa3d85c4e1e62d471e28a54d1341303660ad36d1805984d4d3cf4df36773ac" exitCode=0 Dec 02 19:02:33 crc kubenswrapper[4792]: I1202 19:02:33.530293 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-c5g69" event={"ID":"eb580c6b-0812-49a4-8c58-07c8162c2fe3","Type":"ContainerDied","Data":"9efa3d85c4e1e62d471e28a54d1341303660ad36d1805984d4d3cf4df36773ac"} Dec 02 19:02:33 crc kubenswrapper[4792]: I1202 
19:02:33.533342 4792 generic.go:334] "Generic (PLEG): container finished" podID="a3cc6d17-81b1-431b-8af5-3fc48a65dc37" containerID="ab2b5633405cead41e2b55d9f149b430f0281e4ed51a6b54c58884baa8ffadc5" exitCode=0 Dec 02 19:02:33 crc kubenswrapper[4792]: I1202 19:02:33.533389 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-dbb88bf8c-qknx2" event={"ID":"a3cc6d17-81b1-431b-8af5-3fc48a65dc37","Type":"ContainerDied","Data":"ab2b5633405cead41e2b55d9f149b430f0281e4ed51a6b54c58884baa8ffadc5"} Dec 02 19:02:33 crc kubenswrapper[4792]: I1202 19:02:33.533453 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-dbb88bf8c-qknx2" event={"ID":"a3cc6d17-81b1-431b-8af5-3fc48a65dc37","Type":"ContainerDied","Data":"34ef662e319a46a47593d583d660ead413e5476947aa4a2ac6a01963b9232bf5"} Dec 02 19:02:33 crc kubenswrapper[4792]: I1202 19:02:33.533467 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="34ef662e319a46a47593d583d660ead413e5476947aa4a2ac6a01963b9232bf5" Dec 02 19:02:33 crc kubenswrapper[4792]: I1202 19:02:33.557908 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-dbb88bf8c-qknx2" Dec 02 19:02:33 crc kubenswrapper[4792]: I1202 19:02:33.640784 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/a3cc6d17-81b1-431b-8af5-3fc48a65dc37-openstack-edpm-ipam\") pod \"a3cc6d17-81b1-431b-8af5-3fc48a65dc37\" (UID: \"a3cc6d17-81b1-431b-8af5-3fc48a65dc37\") " Dec 02 19:02:33 crc kubenswrapper[4792]: I1202 19:02:33.640897 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a3cc6d17-81b1-431b-8af5-3fc48a65dc37-ovsdbserver-nb\") pod \"a3cc6d17-81b1-431b-8af5-3fc48a65dc37\" (UID: \"a3cc6d17-81b1-431b-8af5-3fc48a65dc37\") " Dec 02 19:02:33 crc kubenswrapper[4792]: I1202 19:02:33.640965 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a3cc6d17-81b1-431b-8af5-3fc48a65dc37-ovsdbserver-sb\") pod \"a3cc6d17-81b1-431b-8af5-3fc48a65dc37\" (UID: \"a3cc6d17-81b1-431b-8af5-3fc48a65dc37\") " Dec 02 19:02:33 crc kubenswrapper[4792]: I1202 19:02:33.641049 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xbw87\" (UniqueName: \"kubernetes.io/projected/a3cc6d17-81b1-431b-8af5-3fc48a65dc37-kube-api-access-xbw87\") pod \"a3cc6d17-81b1-431b-8af5-3fc48a65dc37\" (UID: \"a3cc6d17-81b1-431b-8af5-3fc48a65dc37\") " Dec 02 19:02:33 crc kubenswrapper[4792]: I1202 19:02:33.641075 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3cc6d17-81b1-431b-8af5-3fc48a65dc37-config\") pod \"a3cc6d17-81b1-431b-8af5-3fc48a65dc37\" (UID: \"a3cc6d17-81b1-431b-8af5-3fc48a65dc37\") " Dec 02 19:02:33 crc kubenswrapper[4792]: I1202 19:02:33.641154 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a3cc6d17-81b1-431b-8af5-3fc48a65dc37-dns-swift-storage-0\") pod \"a3cc6d17-81b1-431b-8af5-3fc48a65dc37\" (UID: \"a3cc6d17-81b1-431b-8af5-3fc48a65dc37\") " Dec 02 19:02:33 crc kubenswrapper[4792]: I1202 19:02:33.641217 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/a3cc6d17-81b1-431b-8af5-3fc48a65dc37-dns-svc\") pod \"a3cc6d17-81b1-431b-8af5-3fc48a65dc37\" (UID: \"a3cc6d17-81b1-431b-8af5-3fc48a65dc37\") " Dec 02 19:02:33 crc kubenswrapper[4792]: I1202 19:02:33.653306 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3cc6d17-81b1-431b-8af5-3fc48a65dc37-kube-api-access-xbw87" (OuterVolumeSpecName: "kube-api-access-xbw87") pod "a3cc6d17-81b1-431b-8af5-3fc48a65dc37" (UID: "a3cc6d17-81b1-431b-8af5-3fc48a65dc37"). InnerVolumeSpecName "kube-api-access-xbw87". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:02:33 crc kubenswrapper[4792]: I1202 19:02:33.709781 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3cc6d17-81b1-431b-8af5-3fc48a65dc37-config" (OuterVolumeSpecName: "config") pod "a3cc6d17-81b1-431b-8af5-3fc48a65dc37" (UID: "a3cc6d17-81b1-431b-8af5-3fc48a65dc37"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 19:02:33 crc kubenswrapper[4792]: I1202 19:02:33.712792 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3cc6d17-81b1-431b-8af5-3fc48a65dc37-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a3cc6d17-81b1-431b-8af5-3fc48a65dc37" (UID: "a3cc6d17-81b1-431b-8af5-3fc48a65dc37"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 19:02:33 crc kubenswrapper[4792]: I1202 19:02:33.715063 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3cc6d17-81b1-431b-8af5-3fc48a65dc37-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a3cc6d17-81b1-431b-8af5-3fc48a65dc37" (UID: "a3cc6d17-81b1-431b-8af5-3fc48a65dc37"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 19:02:33 crc kubenswrapper[4792]: I1202 19:02:33.726024 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3cc6d17-81b1-431b-8af5-3fc48a65dc37-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "a3cc6d17-81b1-431b-8af5-3fc48a65dc37" (UID: "a3cc6d17-81b1-431b-8af5-3fc48a65dc37"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 19:02:33 crc kubenswrapper[4792]: I1202 19:02:33.734858 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3cc6d17-81b1-431b-8af5-3fc48a65dc37-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "a3cc6d17-81b1-431b-8af5-3fc48a65dc37" (UID: "a3cc6d17-81b1-431b-8af5-3fc48a65dc37"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 19:02:33 crc kubenswrapper[4792]: I1202 19:02:33.744168 4792 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/a3cc6d17-81b1-431b-8af5-3fc48a65dc37-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:33 crc kubenswrapper[4792]: I1202 19:02:33.744201 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a3cc6d17-81b1-431b-8af5-3fc48a65dc37-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:33 crc kubenswrapper[4792]: I1202 19:02:33.744211 4792 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a3cc6d17-81b1-431b-8af5-3fc48a65dc37-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:33 crc kubenswrapper[4792]: I1202 19:02:33.744220 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xbw87\" (UniqueName: \"kubernetes.io/projected/a3cc6d17-81b1-431b-8af5-3fc48a65dc37-kube-api-access-xbw87\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:33 crc kubenswrapper[4792]: I1202 19:02:33.744231 4792 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3cc6d17-81b1-431b-8af5-3fc48a65dc37-config\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:33 crc kubenswrapper[4792]: I1202 19:02:33.744241 4792 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a3cc6d17-81b1-431b-8af5-3fc48a65dc37-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:33 crc kubenswrapper[4792]: I1202 19:02:33.747284 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3cc6d17-81b1-431b-8af5-3fc48a65dc37-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a3cc6d17-81b1-431b-8af5-3fc48a65dc37" (UID: "a3cc6d17-81b1-431b-8af5-3fc48a65dc37"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 19:02:33 crc kubenswrapper[4792]: I1202 19:02:33.846024 4792 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a3cc6d17-81b1-431b-8af5-3fc48a65dc37-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:34 crc kubenswrapper[4792]: I1202 19:02:34.547091 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-dbb88bf8c-qknx2" Dec 02 19:02:34 crc kubenswrapper[4792]: I1202 19:02:34.612883 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-dbb88bf8c-qknx2"] Dec 02 19:02:34 crc kubenswrapper[4792]: I1202 19:02:34.661886 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-dbb88bf8c-qknx2"] Dec 02 19:02:35 crc kubenswrapper[4792]: I1202 19:02:35.082306 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-storageinit-c5g69" Dec 02 19:02:35 crc kubenswrapper[4792]: I1202 19:02:35.171701 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb580c6b-0812-49a4-8c58-07c8162c2fe3-combined-ca-bundle\") pod \"eb580c6b-0812-49a4-8c58-07c8162c2fe3\" (UID: \"eb580c6b-0812-49a4-8c58-07c8162c2fe3\") " Dec 02 19:02:35 crc kubenswrapper[4792]: I1202 19:02:35.171854 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/eb580c6b-0812-49a4-8c58-07c8162c2fe3-certs\") pod \"eb580c6b-0812-49a4-8c58-07c8162c2fe3\" (UID: \"eb580c6b-0812-49a4-8c58-07c8162c2fe3\") " Dec 02 19:02:35 crc kubenswrapper[4792]: I1202 19:02:35.171921 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb580c6b-0812-49a4-8c58-07c8162c2fe3-config-data\") pod \"eb580c6b-0812-49a4-8c58-07c8162c2fe3\" (UID: \"eb580c6b-0812-49a4-8c58-07c8162c2fe3\") " Dec 02 19:02:35 crc kubenswrapper[4792]: I1202 19:02:35.172058 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb580c6b-0812-49a4-8c58-07c8162c2fe3-scripts\") pod \"eb580c6b-0812-49a4-8c58-07c8162c2fe3\" (UID: \"eb580c6b-0812-49a4-8c58-07c8162c2fe3\") " Dec 02 19:02:35 crc kubenswrapper[4792]: I1202 19:02:35.172082 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wpdnd\" (UniqueName: \"kubernetes.io/projected/eb580c6b-0812-49a4-8c58-07c8162c2fe3-kube-api-access-wpdnd\") pod \"eb580c6b-0812-49a4-8c58-07c8162c2fe3\" (UID: \"eb580c6b-0812-49a4-8c58-07c8162c2fe3\") " Dec 02 19:02:35 crc kubenswrapper[4792]: I1202 19:02:35.177725 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb580c6b-0812-49a4-8c58-07c8162c2fe3-scripts" (OuterVolumeSpecName: "scripts") pod "eb580c6b-0812-49a4-8c58-07c8162c2fe3" (UID: "eb580c6b-0812-49a4-8c58-07c8162c2fe3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:02:35 crc kubenswrapper[4792]: I1202 19:02:35.177808 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb580c6b-0812-49a4-8c58-07c8162c2fe3-certs" (OuterVolumeSpecName: "certs") pod "eb580c6b-0812-49a4-8c58-07c8162c2fe3" (UID: "eb580c6b-0812-49a4-8c58-07c8162c2fe3"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:02:35 crc kubenswrapper[4792]: I1202 19:02:35.179344 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb580c6b-0812-49a4-8c58-07c8162c2fe3-kube-api-access-wpdnd" (OuterVolumeSpecName: "kube-api-access-wpdnd") pod "eb580c6b-0812-49a4-8c58-07c8162c2fe3" (UID: "eb580c6b-0812-49a4-8c58-07c8162c2fe3"). InnerVolumeSpecName "kube-api-access-wpdnd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:02:35 crc kubenswrapper[4792]: E1202 19:02:35.214196 4792 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/eb580c6b-0812-49a4-8c58-07c8162c2fe3-config-data podName:eb580c6b-0812-49a4-8c58-07c8162c2fe3 nodeName:}" failed. No retries permitted until 2025-12-02 19:02:35.714168432 +0000 UTC m=+1586.487060760 (durationBeforeRetry 500ms). 
Error: error cleaning subPath mounts for volume "config-data" (UniqueName: "kubernetes.io/secret/eb580c6b-0812-49a4-8c58-07c8162c2fe3-config-data") pod "eb580c6b-0812-49a4-8c58-07c8162c2fe3" (UID: "eb580c6b-0812-49a4-8c58-07c8162c2fe3") : error deleting /var/lib/kubelet/pods/eb580c6b-0812-49a4-8c58-07c8162c2fe3/volume-subpaths: remove /var/lib/kubelet/pods/eb580c6b-0812-49a4-8c58-07c8162c2fe3/volume-subpaths: no such file or directory Dec 02 19:02:35 crc kubenswrapper[4792]: I1202 19:02:35.218682 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb580c6b-0812-49a4-8c58-07c8162c2fe3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "eb580c6b-0812-49a4-8c58-07c8162c2fe3" (UID: "eb580c6b-0812-49a4-8c58-07c8162c2fe3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:02:35 crc kubenswrapper[4792]: I1202 19:02:35.274954 4792 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/eb580c6b-0812-49a4-8c58-07c8162c2fe3-certs\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:35 crc kubenswrapper[4792]: I1202 19:02:35.274994 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wpdnd\" (UniqueName: \"kubernetes.io/projected/eb580c6b-0812-49a4-8c58-07c8162c2fe3-kube-api-access-wpdnd\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:35 crc kubenswrapper[4792]: I1202 19:02:35.275014 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb580c6b-0812-49a4-8c58-07c8162c2fe3-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:35 crc kubenswrapper[4792]: I1202 19:02:35.275031 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb580c6b-0812-49a4-8c58-07c8162c2fe3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:35 crc kubenswrapper[4792]: I1202 19:02:35.553860 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a3cc6d17-81b1-431b-8af5-3fc48a65dc37" path="/var/lib/kubelet/pods/a3cc6d17-81b1-431b-8af5-3fc48a65dc37/volumes" Dec 02 19:02:35 crc kubenswrapper[4792]: I1202 19:02:35.559026 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-storageinit-c5g69" Dec 02 19:02:35 crc kubenswrapper[4792]: I1202 19:02:35.562233 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-c5g69" event={"ID":"eb580c6b-0812-49a4-8c58-07c8162c2fe3","Type":"ContainerDied","Data":"4bc43dea693ffc8ed3f48add63cbe4ff6c6895fd52fa20741b200a65eb29c98b"} Dec 02 19:02:35 crc kubenswrapper[4792]: I1202 19:02:35.562319 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4bc43dea693ffc8ed3f48add63cbe4ff6c6895fd52fa20741b200a65eb29c98b" Dec 02 19:02:35 crc kubenswrapper[4792]: I1202 19:02:35.688252 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 02 19:02:35 crc kubenswrapper[4792]: I1202 19:02:35.688590 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-proc-0" podUID="4e89728d-8577-48e0-9452-0bd22ad3ff6d" containerName="cloudkitty-proc" containerID="cri-o://bbaa7b074942b09d92fd90a7481b2a1369c9c719e1ff5cfcb1560b06b1657a81" gracePeriod=30 Dec 02 19:02:35 crc kubenswrapper[4792]: I1202 19:02:35.701230 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 02 19:02:35 crc kubenswrapper[4792]: I1202 19:02:35.701635 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-api-0" podUID="80b99eef-7d18-47ff-baa1-8666aaa0cd86" containerName="cloudkitty-api-log" containerID="cri-o://81d4ab47247a11dfb4482e877a0ac3a23bfe9e69a52046d67402330cedf913c6" gracePeriod=30 Dec 02 19:02:35 crc kubenswrapper[4792]: I1202 19:02:35.702118 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-api-0" podUID="80b99eef-7d18-47ff-baa1-8666aaa0cd86" containerName="cloudkitty-api" containerID="cri-o://8986d95c722cf9982017b116352adf82f1d6b26bb936c977f371832d9ca9bf27" gracePeriod=30 Dec 02 19:02:35 crc kubenswrapper[4792]: I1202 19:02:35.787820 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb580c6b-0812-49a4-8c58-07c8162c2fe3-config-data\") pod \"eb580c6b-0812-49a4-8c58-07c8162c2fe3\" (UID: \"eb580c6b-0812-49a4-8c58-07c8162c2fe3\") " Dec 02 19:02:35 crc kubenswrapper[4792]: I1202 19:02:35.791820 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb580c6b-0812-49a4-8c58-07c8162c2fe3-config-data" (OuterVolumeSpecName: "config-data") pod "eb580c6b-0812-49a4-8c58-07c8162c2fe3" (UID: "eb580c6b-0812-49a4-8c58-07c8162c2fe3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:02:35 crc kubenswrapper[4792]: I1202 19:02:35.891097 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb580c6b-0812-49a4-8c58-07c8162c2fe3-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:36 crc kubenswrapper[4792]: I1202 19:02:36.521702 4792 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cloudkitty-api-0" podUID="80b99eef-7d18-47ff-baa1-8666aaa0cd86" containerName="cloudkitty-api" probeResult="failure" output="Get \"https://10.217.0.189:8889/healthcheck\": dial tcp 10.217.0.189:8889: connect: connection refused" Dec 02 19:02:36 crc kubenswrapper[4792]: I1202 19:02:36.540190 4792 scope.go:117] "RemoveContainer" containerID="1bd3680d8cabbe09313bc8e9bfcfc7ae4c8aef120bcf8851a5536ed5b3144523" Dec 02 19:02:36 crc kubenswrapper[4792]: E1202 19:02:36.540750 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:02:36 crc kubenswrapper[4792]: I1202 19:02:36.576977 4792 generic.go:334] "Generic (PLEG): container finished" podID="80b99eef-7d18-47ff-baa1-8666aaa0cd86" containerID="8986d95c722cf9982017b116352adf82f1d6b26bb936c977f371832d9ca9bf27" exitCode=0 Dec 02 19:02:36 crc kubenswrapper[4792]: I1202 19:02:36.577028 4792 generic.go:334] "Generic (PLEG): container finished" podID="80b99eef-7d18-47ff-baa1-8666aaa0cd86" containerID="81d4ab47247a11dfb4482e877a0ac3a23bfe9e69a52046d67402330cedf913c6" exitCode=143 Dec 02 19:02:36 crc kubenswrapper[4792]: I1202 19:02:36.577144 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"80b99eef-7d18-47ff-baa1-8666aaa0cd86","Type":"ContainerDied","Data":"8986d95c722cf9982017b116352adf82f1d6b26bb936c977f371832d9ca9bf27"} Dec 02 19:02:36 crc kubenswrapper[4792]: I1202 19:02:36.577185 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"80b99eef-7d18-47ff-baa1-8666aaa0cd86","Type":"ContainerDied","Data":"81d4ab47247a11dfb4482e877a0ac3a23bfe9e69a52046d67402330cedf913c6"} Dec 02 19:02:36 crc kubenswrapper[4792]: I1202 19:02:36.584029 4792 generic.go:334] "Generic (PLEG): container finished" podID="4e89728d-8577-48e0-9452-0bd22ad3ff6d" containerID="bbaa7b074942b09d92fd90a7481b2a1369c9c719e1ff5cfcb1560b06b1657a81" exitCode=0 Dec 02 19:02:36 crc kubenswrapper[4792]: I1202 19:02:36.584092 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"4e89728d-8577-48e0-9452-0bd22ad3ff6d","Type":"ContainerDied","Data":"bbaa7b074942b09d92fd90a7481b2a1369c9c719e1ff5cfcb1560b06b1657a81"} Dec 02 19:02:36 crc kubenswrapper[4792]: I1202 19:02:36.851087 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-proc-0" Dec 02 19:02:36 crc kubenswrapper[4792]: I1202 19:02:36.858694 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-api-0" Dec 02 19:02:36 crc kubenswrapper[4792]: I1202 19:02:36.911821 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/80b99eef-7d18-47ff-baa1-8666aaa0cd86-internal-tls-certs\") pod \"80b99eef-7d18-47ff-baa1-8666aaa0cd86\" (UID: \"80b99eef-7d18-47ff-baa1-8666aaa0cd86\") " Dec 02 19:02:36 crc kubenswrapper[4792]: I1202 19:02:36.911887 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/80b99eef-7d18-47ff-baa1-8666aaa0cd86-certs\") pod \"80b99eef-7d18-47ff-baa1-8666aaa0cd86\" (UID: \"80b99eef-7d18-47ff-baa1-8666aaa0cd86\") " Dec 02 19:02:36 crc kubenswrapper[4792]: I1202 19:02:36.911909 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/80b99eef-7d18-47ff-baa1-8666aaa0cd86-logs\") pod \"80b99eef-7d18-47ff-baa1-8666aaa0cd86\" (UID: \"80b99eef-7d18-47ff-baa1-8666aaa0cd86\") " Dec 02 19:02:36 crc kubenswrapper[4792]: I1202 19:02:36.911950 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r7dhr\" (UniqueName: \"kubernetes.io/projected/4e89728d-8577-48e0-9452-0bd22ad3ff6d-kube-api-access-r7dhr\") pod \"4e89728d-8577-48e0-9452-0bd22ad3ff6d\" (UID: \"4e89728d-8577-48e0-9452-0bd22ad3ff6d\") " Dec 02 19:02:36 crc kubenswrapper[4792]: I1202 19:02:36.911973 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e89728d-8577-48e0-9452-0bd22ad3ff6d-scripts\") pod \"4e89728d-8577-48e0-9452-0bd22ad3ff6d\" (UID: \"4e89728d-8577-48e0-9452-0bd22ad3ff6d\") " Dec 02 19:02:36 crc kubenswrapper[4792]: I1202 19:02:36.912007 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4e89728d-8577-48e0-9452-0bd22ad3ff6d-config-data-custom\") pod \"4e89728d-8577-48e0-9452-0bd22ad3ff6d\" (UID: \"4e89728d-8577-48e0-9452-0bd22ad3ff6d\") " Dec 02 19:02:36 crc kubenswrapper[4792]: I1202 19:02:36.912040 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e89728d-8577-48e0-9452-0bd22ad3ff6d-config-data\") pod \"4e89728d-8577-48e0-9452-0bd22ad3ff6d\" (UID: \"4e89728d-8577-48e0-9452-0bd22ad3ff6d\") " Dec 02 19:02:36 crc kubenswrapper[4792]: I1202 19:02:36.912113 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/4e89728d-8577-48e0-9452-0bd22ad3ff6d-certs\") pod \"4e89728d-8577-48e0-9452-0bd22ad3ff6d\" (UID: \"4e89728d-8577-48e0-9452-0bd22ad3ff6d\") " Dec 02 19:02:36 crc kubenswrapper[4792]: I1202 19:02:36.912233 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e89728d-8577-48e0-9452-0bd22ad3ff6d-combined-ca-bundle\") pod \"4e89728d-8577-48e0-9452-0bd22ad3ff6d\" (UID: \"4e89728d-8577-48e0-9452-0bd22ad3ff6d\") " Dec 02 19:02:36 crc kubenswrapper[4792]: I1202 19:02:36.912266 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6bpv\" (UniqueName: \"kubernetes.io/projected/80b99eef-7d18-47ff-baa1-8666aaa0cd86-kube-api-access-d6bpv\") pod \"80b99eef-7d18-47ff-baa1-8666aaa0cd86\" (UID: 
\"80b99eef-7d18-47ff-baa1-8666aaa0cd86\") " Dec 02 19:02:36 crc kubenswrapper[4792]: I1202 19:02:36.912312 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80b99eef-7d18-47ff-baa1-8666aaa0cd86-scripts\") pod \"80b99eef-7d18-47ff-baa1-8666aaa0cd86\" (UID: \"80b99eef-7d18-47ff-baa1-8666aaa0cd86\") " Dec 02 19:02:36 crc kubenswrapper[4792]: I1202 19:02:36.912429 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/80b99eef-7d18-47ff-baa1-8666aaa0cd86-config-data-custom\") pod \"80b99eef-7d18-47ff-baa1-8666aaa0cd86\" (UID: \"80b99eef-7d18-47ff-baa1-8666aaa0cd86\") " Dec 02 19:02:36 crc kubenswrapper[4792]: I1202 19:02:36.912473 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/80b99eef-7d18-47ff-baa1-8666aaa0cd86-public-tls-certs\") pod \"80b99eef-7d18-47ff-baa1-8666aaa0cd86\" (UID: \"80b99eef-7d18-47ff-baa1-8666aaa0cd86\") " Dec 02 19:02:36 crc kubenswrapper[4792]: I1202 19:02:36.912490 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80b99eef-7d18-47ff-baa1-8666aaa0cd86-config-data\") pod \"80b99eef-7d18-47ff-baa1-8666aaa0cd86\" (UID: \"80b99eef-7d18-47ff-baa1-8666aaa0cd86\") " Dec 02 19:02:36 crc kubenswrapper[4792]: I1202 19:02:36.912512 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80b99eef-7d18-47ff-baa1-8666aaa0cd86-combined-ca-bundle\") pod \"80b99eef-7d18-47ff-baa1-8666aaa0cd86\" (UID: \"80b99eef-7d18-47ff-baa1-8666aaa0cd86\") " Dec 02 19:02:36 crc kubenswrapper[4792]: I1202 19:02:36.938179 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e89728d-8577-48e0-9452-0bd22ad3ff6d-kube-api-access-r7dhr" (OuterVolumeSpecName: "kube-api-access-r7dhr") pod "4e89728d-8577-48e0-9452-0bd22ad3ff6d" (UID: "4e89728d-8577-48e0-9452-0bd22ad3ff6d"). InnerVolumeSpecName "kube-api-access-r7dhr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:02:36 crc kubenswrapper[4792]: I1202 19:02:36.947696 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/80b99eef-7d18-47ff-baa1-8666aaa0cd86-logs" (OuterVolumeSpecName: "logs") pod "80b99eef-7d18-47ff-baa1-8666aaa0cd86" (UID: "80b99eef-7d18-47ff-baa1-8666aaa0cd86"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:02:36 crc kubenswrapper[4792]: I1202 19:02:36.976215 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e89728d-8577-48e0-9452-0bd22ad3ff6d-scripts" (OuterVolumeSpecName: "scripts") pod "4e89728d-8577-48e0-9452-0bd22ad3ff6d" (UID: "4e89728d-8577-48e0-9452-0bd22ad3ff6d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:02:36 crc kubenswrapper[4792]: I1202 19:02:36.982869 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e89728d-8577-48e0-9452-0bd22ad3ff6d-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "4e89728d-8577-48e0-9452-0bd22ad3ff6d" (UID: "4e89728d-8577-48e0-9452-0bd22ad3ff6d"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:02:36 crc kubenswrapper[4792]: I1202 19:02:36.982913 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e89728d-8577-48e0-9452-0bd22ad3ff6d-certs" (OuterVolumeSpecName: "certs") pod "4e89728d-8577-48e0-9452-0bd22ad3ff6d" (UID: "4e89728d-8577-48e0-9452-0bd22ad3ff6d"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:02:36 crc kubenswrapper[4792]: I1202 19:02:36.983295 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80b99eef-7d18-47ff-baa1-8666aaa0cd86-kube-api-access-d6bpv" (OuterVolumeSpecName: "kube-api-access-d6bpv") pod "80b99eef-7d18-47ff-baa1-8666aaa0cd86" (UID: "80b99eef-7d18-47ff-baa1-8666aaa0cd86"). InnerVolumeSpecName "kube-api-access-d6bpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:02:36 crc kubenswrapper[4792]: I1202 19:02:36.983427 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80b99eef-7d18-47ff-baa1-8666aaa0cd86-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "80b99eef-7d18-47ff-baa1-8666aaa0cd86" (UID: "80b99eef-7d18-47ff-baa1-8666aaa0cd86"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:02:36 crc kubenswrapper[4792]: I1202 19:02:36.984740 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80b99eef-7d18-47ff-baa1-8666aaa0cd86-scripts" (OuterVolumeSpecName: "scripts") pod "80b99eef-7d18-47ff-baa1-8666aaa0cd86" (UID: "80b99eef-7d18-47ff-baa1-8666aaa0cd86"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.000038 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80b99eef-7d18-47ff-baa1-8666aaa0cd86-certs" (OuterVolumeSpecName: "certs") pod "80b99eef-7d18-47ff-baa1-8666aaa0cd86" (UID: "80b99eef-7d18-47ff-baa1-8666aaa0cd86"). InnerVolumeSpecName "certs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.014084 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80b99eef-7d18-47ff-baa1-8666aaa0cd86-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.014118 4792 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/80b99eef-7d18-47ff-baa1-8666aaa0cd86-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.014128 4792 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/80b99eef-7d18-47ff-baa1-8666aaa0cd86-certs\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.014136 4792 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/80b99eef-7d18-47ff-baa1-8666aaa0cd86-logs\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.014147 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r7dhr\" (UniqueName: \"kubernetes.io/projected/4e89728d-8577-48e0-9452-0bd22ad3ff6d-kube-api-access-r7dhr\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.014157 4792 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e89728d-8577-48e0-9452-0bd22ad3ff6d-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.014166 4792 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4e89728d-8577-48e0-9452-0bd22ad3ff6d-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.014174 4792 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/4e89728d-8577-48e0-9452-0bd22ad3ff6d-certs\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.014181 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6bpv\" (UniqueName: \"kubernetes.io/projected/80b99eef-7d18-47ff-baa1-8666aaa0cd86-kube-api-access-d6bpv\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.035875 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e89728d-8577-48e0-9452-0bd22ad3ff6d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4e89728d-8577-48e0-9452-0bd22ad3ff6d" (UID: "4e89728d-8577-48e0-9452-0bd22ad3ff6d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.079886 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e89728d-8577-48e0-9452-0bd22ad3ff6d-config-data" (OuterVolumeSpecName: "config-data") pod "4e89728d-8577-48e0-9452-0bd22ad3ff6d" (UID: "4e89728d-8577-48e0-9452-0bd22ad3ff6d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.080187 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80b99eef-7d18-47ff-baa1-8666aaa0cd86-config-data" (OuterVolumeSpecName: "config-data") pod "80b99eef-7d18-47ff-baa1-8666aaa0cd86" (UID: "80b99eef-7d18-47ff-baa1-8666aaa0cd86"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.097662 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80b99eef-7d18-47ff-baa1-8666aaa0cd86-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "80b99eef-7d18-47ff-baa1-8666aaa0cd86" (UID: "80b99eef-7d18-47ff-baa1-8666aaa0cd86"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.115417 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e89728d-8577-48e0-9452-0bd22ad3ff6d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.115448 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80b99eef-7d18-47ff-baa1-8666aaa0cd86-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.115459 4792 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80b99eef-7d18-47ff-baa1-8666aaa0cd86-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.115471 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e89728d-8577-48e0-9452-0bd22ad3ff6d-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.124443 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80b99eef-7d18-47ff-baa1-8666aaa0cd86-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "80b99eef-7d18-47ff-baa1-8666aaa0cd86" (UID: "80b99eef-7d18-47ff-baa1-8666aaa0cd86"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.136974 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80b99eef-7d18-47ff-baa1-8666aaa0cd86-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "80b99eef-7d18-47ff-baa1-8666aaa0cd86" (UID: "80b99eef-7d18-47ff-baa1-8666aaa0cd86"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.217493 4792 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/80b99eef-7d18-47ff-baa1-8666aaa0cd86-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.217537 4792 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/80b99eef-7d18-47ff-baa1-8666aaa0cd86-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.599379 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-proc-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.599358 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"4e89728d-8577-48e0-9452-0bd22ad3ff6d","Type":"ContainerDied","Data":"5bed5a34fe6db6cbe5f33993b8c814af1a5e0d0ef5759db1dabf2aa25a1b39e5"} Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.604604 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"80b99eef-7d18-47ff-baa1-8666aaa0cd86","Type":"ContainerDied","Data":"727a4e3a563c760e9a31de51e471cd7b4a01c16a0704278abb65b97298f9fcf5"} Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.604639 4792 scope.go:117] "RemoveContainer" containerID="bbaa7b074942b09d92fd90a7481b2a1369c9c719e1ff5cfcb1560b06b1657a81" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.601943 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.639426 4792 scope.go:117] "RemoveContainer" containerID="8986d95c722cf9982017b116352adf82f1d6b26bb936c977f371832d9ca9bf27" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.650132 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.676025 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.678913 4792 scope.go:117] "RemoveContainer" containerID="81d4ab47247a11dfb4482e877a0ac3a23bfe9e69a52046d67402330cedf913c6" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.686937 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.702559 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.714575 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 02 19:02:37 crc kubenswrapper[4792]: E1202 19:02:37.715022 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e89728d-8577-48e0-9452-0bd22ad3ff6d" containerName="cloudkitty-proc" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.715039 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e89728d-8577-48e0-9452-0bd22ad3ff6d" containerName="cloudkitty-proc" Dec 02 19:02:37 crc kubenswrapper[4792]: E1202 19:02:37.715056 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3cc6d17-81b1-431b-8af5-3fc48a65dc37" containerName="dnsmasq-dns" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.715064 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3cc6d17-81b1-431b-8af5-3fc48a65dc37" containerName="dnsmasq-dns" Dec 02 19:02:37 crc kubenswrapper[4792]: E1202 19:02:37.715079 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3cc6d17-81b1-431b-8af5-3fc48a65dc37" containerName="init" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.715085 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3cc6d17-81b1-431b-8af5-3fc48a65dc37" containerName="init" Dec 02 19:02:37 crc kubenswrapper[4792]: E1202 19:02:37.715096 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb580c6b-0812-49a4-8c58-07c8162c2fe3" containerName="cloudkitty-storageinit" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 
19:02:37.715101 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb580c6b-0812-49a4-8c58-07c8162c2fe3" containerName="cloudkitty-storageinit" Dec 02 19:02:37 crc kubenswrapper[4792]: E1202 19:02:37.715112 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80b99eef-7d18-47ff-baa1-8666aaa0cd86" containerName="cloudkitty-api-log" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.715118 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="80b99eef-7d18-47ff-baa1-8666aaa0cd86" containerName="cloudkitty-api-log" Dec 02 19:02:37 crc kubenswrapper[4792]: E1202 19:02:37.715131 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80b99eef-7d18-47ff-baa1-8666aaa0cd86" containerName="cloudkitty-api" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.715137 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="80b99eef-7d18-47ff-baa1-8666aaa0cd86" containerName="cloudkitty-api" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.715317 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e89728d-8577-48e0-9452-0bd22ad3ff6d" containerName="cloudkitty-proc" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.715328 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="80b99eef-7d18-47ff-baa1-8666aaa0cd86" containerName="cloudkitty-api" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.715341 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="80b99eef-7d18-47ff-baa1-8666aaa0cd86" containerName="cloudkitty-api-log" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.715360 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb580c6b-0812-49a4-8c58-07c8162c2fe3" containerName="cloudkitty-storageinit" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.715370 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3cc6d17-81b1-431b-8af5-3fc48a65dc37" containerName="dnsmasq-dns" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.716041 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-proc-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.719207 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-proc-config-data" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.724854 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.733629 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-scripts" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.735512 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-api-0"] Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.737128 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-api-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.739228 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-public-svc" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.739507 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-cloudkitty-dockercfg-qndk8" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.739708 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-api-config-data" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.739848 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-internal-svc" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.740127 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-client-internal" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.740382 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-config-data" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.786208 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.831811 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2cbb2471-1100-45f2-9279-c15ef98e34cf-config-data\") pod \"cloudkitty-api-0\" (UID: \"2cbb2471-1100-45f2-9279-c15ef98e34cf\") " pod="openstack/cloudkitty-api-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.831860 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2cbb2471-1100-45f2-9279-c15ef98e34cf-public-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"2cbb2471-1100-45f2-9279-c15ef98e34cf\") " pod="openstack/cloudkitty-api-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.831941 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b3b138aa-fe70-4c2d-9f02-64a8f5a96ae1-scripts\") pod \"cloudkitty-proc-0\" (UID: \"b3b138aa-fe70-4c2d-9f02-64a8f5a96ae1\") " pod="openstack/cloudkitty-proc-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.831989 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/2cbb2471-1100-45f2-9279-c15ef98e34cf-certs\") pod \"cloudkitty-api-0\" (UID: \"2cbb2471-1100-45f2-9279-c15ef98e34cf\") " pod="openstack/cloudkitty-api-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.832014 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/b3b138aa-fe70-4c2d-9f02-64a8f5a96ae1-certs\") pod \"cloudkitty-proc-0\" (UID: \"b3b138aa-fe70-4c2d-9f02-64a8f5a96ae1\") " pod="openstack/cloudkitty-proc-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.832034 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2cbb2471-1100-45f2-9279-c15ef98e34cf-scripts\") pod \"cloudkitty-api-0\" (UID: \"2cbb2471-1100-45f2-9279-c15ef98e34cf\") " pod="openstack/cloudkitty-api-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.832158 4792 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2cbb2471-1100-45f2-9279-c15ef98e34cf-logs\") pod \"cloudkitty-api-0\" (UID: \"2cbb2471-1100-45f2-9279-c15ef98e34cf\") " pod="openstack/cloudkitty-api-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.832224 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9ftz\" (UniqueName: \"kubernetes.io/projected/2cbb2471-1100-45f2-9279-c15ef98e34cf-kube-api-access-t9ftz\") pod \"cloudkitty-api-0\" (UID: \"2cbb2471-1100-45f2-9279-c15ef98e34cf\") " pod="openstack/cloudkitty-api-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.832256 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2cbb2471-1100-45f2-9279-c15ef98e34cf-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"2cbb2471-1100-45f2-9279-c15ef98e34cf\") " pod="openstack/cloudkitty-api-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.832462 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3b138aa-fe70-4c2d-9f02-64a8f5a96ae1-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"b3b138aa-fe70-4c2d-9f02-64a8f5a96ae1\") " pod="openstack/cloudkitty-proc-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.832605 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cbb2471-1100-45f2-9279-c15ef98e34cf-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"2cbb2471-1100-45f2-9279-c15ef98e34cf\") " pod="openstack/cloudkitty-api-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.832728 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b3b138aa-fe70-4c2d-9f02-64a8f5a96ae1-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"b3b138aa-fe70-4c2d-9f02-64a8f5a96ae1\") " pod="openstack/cloudkitty-proc-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.832820 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3b138aa-fe70-4c2d-9f02-64a8f5a96ae1-config-data\") pod \"cloudkitty-proc-0\" (UID: \"b3b138aa-fe70-4c2d-9f02-64a8f5a96ae1\") " pod="openstack/cloudkitty-proc-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.832931 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6sfr\" (UniqueName: \"kubernetes.io/projected/b3b138aa-fe70-4c2d-9f02-64a8f5a96ae1-kube-api-access-f6sfr\") pod \"cloudkitty-proc-0\" (UID: \"b3b138aa-fe70-4c2d-9f02-64a8f5a96ae1\") " pod="openstack/cloudkitty-proc-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.833094 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2cbb2471-1100-45f2-9279-c15ef98e34cf-internal-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"2cbb2471-1100-45f2-9279-c15ef98e34cf\") " pod="openstack/cloudkitty-api-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.934908 4792 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2cbb2471-1100-45f2-9279-c15ef98e34cf-logs\") pod \"cloudkitty-api-0\" (UID: \"2cbb2471-1100-45f2-9279-c15ef98e34cf\") " pod="openstack/cloudkitty-api-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.934973 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9ftz\" (UniqueName: \"kubernetes.io/projected/2cbb2471-1100-45f2-9279-c15ef98e34cf-kube-api-access-t9ftz\") pod \"cloudkitty-api-0\" (UID: \"2cbb2471-1100-45f2-9279-c15ef98e34cf\") " pod="openstack/cloudkitty-api-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.934998 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2cbb2471-1100-45f2-9279-c15ef98e34cf-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"2cbb2471-1100-45f2-9279-c15ef98e34cf\") " pod="openstack/cloudkitty-api-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.935045 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3b138aa-fe70-4c2d-9f02-64a8f5a96ae1-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"b3b138aa-fe70-4c2d-9f02-64a8f5a96ae1\") " pod="openstack/cloudkitty-proc-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.935068 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cbb2471-1100-45f2-9279-c15ef98e34cf-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"2cbb2471-1100-45f2-9279-c15ef98e34cf\") " pod="openstack/cloudkitty-api-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.935331 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2cbb2471-1100-45f2-9279-c15ef98e34cf-logs\") pod \"cloudkitty-api-0\" (UID: \"2cbb2471-1100-45f2-9279-c15ef98e34cf\") " pod="openstack/cloudkitty-api-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.936340 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b3b138aa-fe70-4c2d-9f02-64a8f5a96ae1-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"b3b138aa-fe70-4c2d-9f02-64a8f5a96ae1\") " pod="openstack/cloudkitty-proc-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.936377 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3b138aa-fe70-4c2d-9f02-64a8f5a96ae1-config-data\") pod \"cloudkitty-proc-0\" (UID: \"b3b138aa-fe70-4c2d-9f02-64a8f5a96ae1\") " pod="openstack/cloudkitty-proc-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.936418 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6sfr\" (UniqueName: \"kubernetes.io/projected/b3b138aa-fe70-4c2d-9f02-64a8f5a96ae1-kube-api-access-f6sfr\") pod \"cloudkitty-proc-0\" (UID: \"b3b138aa-fe70-4c2d-9f02-64a8f5a96ae1\") " pod="openstack/cloudkitty-proc-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.936477 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2cbb2471-1100-45f2-9279-c15ef98e34cf-internal-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"2cbb2471-1100-45f2-9279-c15ef98e34cf\") " pod="openstack/cloudkitty-api-0" Dec 02 19:02:37 crc 
kubenswrapper[4792]: I1202 19:02:37.936567 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2cbb2471-1100-45f2-9279-c15ef98e34cf-config-data\") pod \"cloudkitty-api-0\" (UID: \"2cbb2471-1100-45f2-9279-c15ef98e34cf\") " pod="openstack/cloudkitty-api-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.936589 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2cbb2471-1100-45f2-9279-c15ef98e34cf-public-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"2cbb2471-1100-45f2-9279-c15ef98e34cf\") " pod="openstack/cloudkitty-api-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.936636 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b3b138aa-fe70-4c2d-9f02-64a8f5a96ae1-scripts\") pod \"cloudkitty-proc-0\" (UID: \"b3b138aa-fe70-4c2d-9f02-64a8f5a96ae1\") " pod="openstack/cloudkitty-proc-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.936664 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/2cbb2471-1100-45f2-9279-c15ef98e34cf-certs\") pod \"cloudkitty-api-0\" (UID: \"2cbb2471-1100-45f2-9279-c15ef98e34cf\") " pod="openstack/cloudkitty-api-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.936688 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/b3b138aa-fe70-4c2d-9f02-64a8f5a96ae1-certs\") pod \"cloudkitty-proc-0\" (UID: \"b3b138aa-fe70-4c2d-9f02-64a8f5a96ae1\") " pod="openstack/cloudkitty-proc-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.936709 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2cbb2471-1100-45f2-9279-c15ef98e34cf-scripts\") pod \"cloudkitty-api-0\" (UID: \"2cbb2471-1100-45f2-9279-c15ef98e34cf\") " pod="openstack/cloudkitty-api-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.939807 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cbb2471-1100-45f2-9279-c15ef98e34cf-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"2cbb2471-1100-45f2-9279-c15ef98e34cf\") " pod="openstack/cloudkitty-api-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.939959 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2cbb2471-1100-45f2-9279-c15ef98e34cf-internal-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"2cbb2471-1100-45f2-9279-c15ef98e34cf\") " pod="openstack/cloudkitty-api-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.940347 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2cbb2471-1100-45f2-9279-c15ef98e34cf-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"2cbb2471-1100-45f2-9279-c15ef98e34cf\") " pod="openstack/cloudkitty-api-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.942537 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/2cbb2471-1100-45f2-9279-c15ef98e34cf-certs\") pod \"cloudkitty-api-0\" (UID: \"2cbb2471-1100-45f2-9279-c15ef98e34cf\") " pod="openstack/cloudkitty-api-0" Dec 02 19:02:37 crc 
kubenswrapper[4792]: I1202 19:02:37.943763 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2cbb2471-1100-45f2-9279-c15ef98e34cf-config-data\") pod \"cloudkitty-api-0\" (UID: \"2cbb2471-1100-45f2-9279-c15ef98e34cf\") " pod="openstack/cloudkitty-api-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.946070 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2cbb2471-1100-45f2-9279-c15ef98e34cf-scripts\") pod \"cloudkitty-api-0\" (UID: \"2cbb2471-1100-45f2-9279-c15ef98e34cf\") " pod="openstack/cloudkitty-api-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.946245 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/b3b138aa-fe70-4c2d-9f02-64a8f5a96ae1-certs\") pod \"cloudkitty-proc-0\" (UID: \"b3b138aa-fe70-4c2d-9f02-64a8f5a96ae1\") " pod="openstack/cloudkitty-proc-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.946817 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b3b138aa-fe70-4c2d-9f02-64a8f5a96ae1-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"b3b138aa-fe70-4c2d-9f02-64a8f5a96ae1\") " pod="openstack/cloudkitty-proc-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.946861 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2cbb2471-1100-45f2-9279-c15ef98e34cf-public-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"2cbb2471-1100-45f2-9279-c15ef98e34cf\") " pod="openstack/cloudkitty-api-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.946878 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3b138aa-fe70-4c2d-9f02-64a8f5a96ae1-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"b3b138aa-fe70-4c2d-9f02-64a8f5a96ae1\") " pod="openstack/cloudkitty-proc-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.947174 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b3b138aa-fe70-4c2d-9f02-64a8f5a96ae1-scripts\") pod \"cloudkitty-proc-0\" (UID: \"b3b138aa-fe70-4c2d-9f02-64a8f5a96ae1\") " pod="openstack/cloudkitty-proc-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.947878 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3b138aa-fe70-4c2d-9f02-64a8f5a96ae1-config-data\") pod \"cloudkitty-proc-0\" (UID: \"b3b138aa-fe70-4c2d-9f02-64a8f5a96ae1\") " pod="openstack/cloudkitty-proc-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.955814 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9ftz\" (UniqueName: \"kubernetes.io/projected/2cbb2471-1100-45f2-9279-c15ef98e34cf-kube-api-access-t9ftz\") pod \"cloudkitty-api-0\" (UID: \"2cbb2471-1100-45f2-9279-c15ef98e34cf\") " pod="openstack/cloudkitty-api-0" Dec 02 19:02:37 crc kubenswrapper[4792]: I1202 19:02:37.959579 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6sfr\" (UniqueName: \"kubernetes.io/projected/b3b138aa-fe70-4c2d-9f02-64a8f5a96ae1-kube-api-access-f6sfr\") pod \"cloudkitty-proc-0\" (UID: \"b3b138aa-fe70-4c2d-9f02-64a8f5a96ae1\") " pod="openstack/cloudkitty-proc-0" Dec 02 19:02:38 crc 
Dec 02 19:02:38 crc kubenswrapper[4792]: I1202 19:02:38.073376 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0"
Dec 02 19:02:38 crc kubenswrapper[4792]: I1202 19:02:38.582107 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-proc-0"]
Dec 02 19:02:38 crc kubenswrapper[4792]: W1202 19:02:38.584130 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb3b138aa_fe70_4c2d_9f02_64a8f5a96ae1.slice/crio-4a79af572a7c849592fe3374097631e64bc987cf2dd4f329dc23ebd70a345c6a WatchSource:0}: Error finding container 4a79af572a7c849592fe3374097631e64bc987cf2dd4f329dc23ebd70a345c6a: Status 404 returned error can't find the container with id 4a79af572a7c849592fe3374097631e64bc987cf2dd4f329dc23ebd70a345c6a
Dec 02 19:02:38 crc kubenswrapper[4792]: I1202 19:02:38.631987 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"b3b138aa-fe70-4c2d-9f02-64a8f5a96ae1","Type":"ContainerStarted","Data":"4a79af572a7c849592fe3374097631e64bc987cf2dd4f329dc23ebd70a345c6a"}
Dec 02 19:02:38 crc kubenswrapper[4792]: I1202 19:02:38.676633 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-api-0"]
Dec 02 19:02:39 crc kubenswrapper[4792]: I1202 19:02:39.553574 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e89728d-8577-48e0-9452-0bd22ad3ff6d" path="/var/lib/kubelet/pods/4e89728d-8577-48e0-9452-0bd22ad3ff6d/volumes"
Dec 02 19:02:39 crc kubenswrapper[4792]: I1202 19:02:39.555336 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="80b99eef-7d18-47ff-baa1-8666aaa0cd86" path="/var/lib/kubelet/pods/80b99eef-7d18-47ff-baa1-8666aaa0cd86/volumes"
Dec 02 19:02:39 crc kubenswrapper[4792]: I1202 19:02:39.651817 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"2cbb2471-1100-45f2-9279-c15ef98e34cf","Type":"ContainerStarted","Data":"5e13a3e71c55bff3660462ec59e2f85e323fc9a44c6efb1747b5cf18020cb787"}
Dec 02 19:02:39 crc kubenswrapper[4792]: I1202 19:02:39.651861 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"2cbb2471-1100-45f2-9279-c15ef98e34cf","Type":"ContainerStarted","Data":"fec46eb1aae0f85484581f2ccddcd3ffe48788bfb0ebf3033d9492d2c65ff65c"}
Dec 02 19:02:39 crc kubenswrapper[4792]: I1202 19:02:39.651873 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"2cbb2471-1100-45f2-9279-c15ef98e34cf","Type":"ContainerStarted","Data":"ec775a986fa60720d760aea767a0eddcc8d4c5d0fc26c454b4447f17b771ebac"}
Dec 02 19:02:39 crc kubenswrapper[4792]: I1202 19:02:39.651981 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-api-0"
Dec 02 19:02:39 crc kubenswrapper[4792]: I1202 19:02:39.695680 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-api-0" podStartSLOduration=2.695612141 podStartE2EDuration="2.695612141s" podCreationTimestamp="2025-12-02 19:02:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 19:02:39.6813662 +0000 UTC m=+1590.454258528" watchObservedRunningTime="2025-12-02 19:02:39.695612141 +0000 UTC m=+1590.468504509"
Dec 02 19:02:40 crc kubenswrapper[4792]: I1202 19:02:40.666415 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"b3b138aa-fe70-4c2d-9f02-64a8f5a96ae1","Type":"ContainerStarted","Data":"be639d6dcb0d32e622c28824760ce5e8a802b65cec8bd2c29ffb8f5b8744395f"}
Dec 02 19:02:40 crc kubenswrapper[4792]: I1202 19:02:40.688756 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-proc-0" podStartSLOduration=2.6429212890000002 podStartE2EDuration="3.688736288s" podCreationTimestamp="2025-12-02 19:02:37 +0000 UTC" firstStartedPulling="2025-12-02 19:02:38.59058603 +0000 UTC m=+1589.363478358" lastFinishedPulling="2025-12-02 19:02:39.636401019 +0000 UTC m=+1590.409293357" observedRunningTime="2025-12-02 19:02:40.687059434 +0000 UTC m=+1591.459951772" watchObservedRunningTime="2025-12-02 19:02:40.688736288 +0000 UTC m=+1591.461628636"
Dec 02 19:02:45 crc kubenswrapper[4792]: I1202 19:02:45.200157 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-khrbn"]
Dec 02 19:02:45 crc kubenswrapper[4792]: I1202 19:02:45.203718 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-khrbn"
Dec 02 19:02:45 crc kubenswrapper[4792]: I1202 19:02:45.210556 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 02 19:02:45 crc kubenswrapper[4792]: I1202 19:02:45.210957 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zln7c"
Dec 02 19:02:45 crc kubenswrapper[4792]: I1202 19:02:45.214511 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 02 19:02:45 crc kubenswrapper[4792]: I1202 19:02:45.214828 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 02 19:02:45 crc kubenswrapper[4792]: I1202 19:02:45.256122 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-khrbn"]
Dec 02 19:02:45 crc kubenswrapper[4792]: I1202 19:02:45.305912 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a93efefe-ec0a-45dc-8276-6d99cb2b4db8-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-khrbn\" (UID: \"a93efefe-ec0a-45dc-8276-6d99cb2b4db8\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-khrbn"
Dec 02 19:02:45 crc kubenswrapper[4792]: I1202 19:02:45.305980 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sd4gz\" (UniqueName: \"kubernetes.io/projected/a93efefe-ec0a-45dc-8276-6d99cb2b4db8-kube-api-access-sd4gz\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-khrbn\" (UID: \"a93efefe-ec0a-45dc-8276-6d99cb2b4db8\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-khrbn"
Dec 02 19:02:45 crc kubenswrapper[4792]: I1202 19:02:45.306356 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a93efefe-ec0a-45dc-8276-6d99cb2b4db8-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-khrbn\" (UID: \"a93efefe-ec0a-45dc-8276-6d99cb2b4db8\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-khrbn"
\"repo-setup-edpm-deployment-openstack-edpm-ipam-khrbn\" (UID: \"a93efefe-ec0a-45dc-8276-6d99cb2b4db8\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-khrbn" Dec 02 19:02:45 crc kubenswrapper[4792]: I1202 19:02:45.306499 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a93efefe-ec0a-45dc-8276-6d99cb2b4db8-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-khrbn\" (UID: \"a93efefe-ec0a-45dc-8276-6d99cb2b4db8\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-khrbn" Dec 02 19:02:45 crc kubenswrapper[4792]: I1202 19:02:45.408592 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a93efefe-ec0a-45dc-8276-6d99cb2b4db8-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-khrbn\" (UID: \"a93efefe-ec0a-45dc-8276-6d99cb2b4db8\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-khrbn" Dec 02 19:02:45 crc kubenswrapper[4792]: I1202 19:02:45.408978 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sd4gz\" (UniqueName: \"kubernetes.io/projected/a93efefe-ec0a-45dc-8276-6d99cb2b4db8-kube-api-access-sd4gz\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-khrbn\" (UID: \"a93efefe-ec0a-45dc-8276-6d99cb2b4db8\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-khrbn" Dec 02 19:02:45 crc kubenswrapper[4792]: I1202 19:02:45.409417 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a93efefe-ec0a-45dc-8276-6d99cb2b4db8-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-khrbn\" (UID: \"a93efefe-ec0a-45dc-8276-6d99cb2b4db8\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-khrbn" Dec 02 19:02:45 crc kubenswrapper[4792]: I1202 19:02:45.409901 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a93efefe-ec0a-45dc-8276-6d99cb2b4db8-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-khrbn\" (UID: \"a93efefe-ec0a-45dc-8276-6d99cb2b4db8\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-khrbn" Dec 02 19:02:45 crc kubenswrapper[4792]: I1202 19:02:45.414969 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a93efefe-ec0a-45dc-8276-6d99cb2b4db8-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-khrbn\" (UID: \"a93efefe-ec0a-45dc-8276-6d99cb2b4db8\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-khrbn" Dec 02 19:02:45 crc kubenswrapper[4792]: I1202 19:02:45.415163 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a93efefe-ec0a-45dc-8276-6d99cb2b4db8-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-khrbn\" (UID: \"a93efefe-ec0a-45dc-8276-6d99cb2b4db8\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-khrbn" Dec 02 19:02:45 crc kubenswrapper[4792]: I1202 19:02:45.415543 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a93efefe-ec0a-45dc-8276-6d99cb2b4db8-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-khrbn\" (UID: 
\"a93efefe-ec0a-45dc-8276-6d99cb2b4db8\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-khrbn" Dec 02 19:02:45 crc kubenswrapper[4792]: I1202 19:02:45.437441 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sd4gz\" (UniqueName: \"kubernetes.io/projected/a93efefe-ec0a-45dc-8276-6d99cb2b4db8-kube-api-access-sd4gz\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-khrbn\" (UID: \"a93efefe-ec0a-45dc-8276-6d99cb2b4db8\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-khrbn" Dec 02 19:02:45 crc kubenswrapper[4792]: I1202 19:02:45.537720 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-khrbn" Dec 02 19:02:46 crc kubenswrapper[4792]: I1202 19:02:46.202724 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-khrbn"] Dec 02 19:02:46 crc kubenswrapper[4792]: W1202 19:02:46.212932 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda93efefe_ec0a_45dc_8276_6d99cb2b4db8.slice/crio-8b8b6f99797f5dc70e11017f7a958125be593e1c4058102a535be8658b4587a7 WatchSource:0}: Error finding container 8b8b6f99797f5dc70e11017f7a958125be593e1c4058102a535be8658b4587a7: Status 404 returned error can't find the container with id 8b8b6f99797f5dc70e11017f7a958125be593e1c4058102a535be8658b4587a7 Dec 02 19:02:46 crc kubenswrapper[4792]: I1202 19:02:46.754028 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-khrbn" event={"ID":"a93efefe-ec0a-45dc-8276-6d99cb2b4db8","Type":"ContainerStarted","Data":"8b8b6f99797f5dc70e11017f7a958125be593e1c4058102a535be8658b4587a7"} Dec 02 19:02:48 crc kubenswrapper[4792]: I1202 19:02:48.789697 4792 generic.go:334] "Generic (PLEG): container finished" podID="c43c55d9-74e9-4158-a193-ee8ead807ad7" containerID="eb341509a5e5e14b7592bba8d0f7320ab6061c13d411e7bf78c7f5d1fd729108" exitCode=0 Dec 02 19:02:48 crc kubenswrapper[4792]: I1202 19:02:48.789783 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c43c55d9-74e9-4158-a193-ee8ead807ad7","Type":"ContainerDied","Data":"eb341509a5e5e14b7592bba8d0f7320ab6061c13d411e7bf78c7f5d1fd729108"} Dec 02 19:02:49 crc kubenswrapper[4792]: I1202 19:02:49.580881 4792 scope.go:117] "RemoveContainer" containerID="1bd3680d8cabbe09313bc8e9bfcfc7ae4c8aef120bcf8851a5536ed5b3144523" Dec 02 19:02:49 crc kubenswrapper[4792]: E1202 19:02:49.582448 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:02:49 crc kubenswrapper[4792]: I1202 19:02:49.809249 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c43c55d9-74e9-4158-a193-ee8ead807ad7","Type":"ContainerStarted","Data":"351023d85828e37a57a27738db6bd109b8ba354f88f6166233602a80a06582be"} Dec 02 19:02:49 crc kubenswrapper[4792]: I1202 19:02:49.809573 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 
02 19:02:49 crc kubenswrapper[4792]: I1202 19:02:49.848053 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=36.848026129 podStartE2EDuration="36.848026129s" podCreationTimestamp="2025-12-02 19:02:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 19:02:49.838678886 +0000 UTC m=+1600.611571244" watchObservedRunningTime="2025-12-02 19:02:49.848026129 +0000 UTC m=+1600.620918497" Dec 02 19:02:50 crc kubenswrapper[4792]: E1202 19:02:50.105703 4792 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod197e738b_95d3_4250_b16a_e70331f46ba5.slice/crio-8101c4a85920cba2963f7f4702b50ed24cbc7f9ba3d1f986b2bf1b73be9826af.scope\": RecentStats: unable to find data in memory cache]" Dec 02 19:02:50 crc kubenswrapper[4792]: I1202 19:02:50.820419 4792 generic.go:334] "Generic (PLEG): container finished" podID="197e738b-95d3-4250-b16a-e70331f46ba5" containerID="8101c4a85920cba2963f7f4702b50ed24cbc7f9ba3d1f986b2bf1b73be9826af" exitCode=0 Dec 02 19:02:50 crc kubenswrapper[4792]: I1202 19:02:50.820571 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"197e738b-95d3-4250-b16a-e70331f46ba5","Type":"ContainerDied","Data":"8101c4a85920cba2963f7f4702b50ed24cbc7f9ba3d1f986b2bf1b73be9826af"} Dec 02 19:02:58 crc kubenswrapper[4792]: I1202 19:02:58.938217 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"197e738b-95d3-4250-b16a-e70331f46ba5","Type":"ContainerStarted","Data":"edf75e947fca0cfbaf6711699d79e03d36f0ba6087a02a302b90a5a9a9f69745"} Dec 02 19:02:58 crc kubenswrapper[4792]: I1202 19:02:58.941080 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 02 19:02:58 crc kubenswrapper[4792]: I1202 19:02:58.944388 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-khrbn" event={"ID":"a93efefe-ec0a-45dc-8276-6d99cb2b4db8","Type":"ContainerStarted","Data":"9fcf9b2312fffafb6c40b9818dd9d309564b463e08d97c65f4d4de24870d73b5"} Dec 02 19:02:58 crc kubenswrapper[4792]: I1202 19:02:58.993416 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=44.993397247 podStartE2EDuration="44.993397247s" podCreationTimestamp="2025-12-02 19:02:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 19:02:58.972555174 +0000 UTC m=+1609.745447512" watchObservedRunningTime="2025-12-02 19:02:58.993397247 +0000 UTC m=+1609.766289575" Dec 02 19:02:59 crc kubenswrapper[4792]: I1202 19:02:59.018019 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-khrbn" podStartSLOduration=1.858239235 podStartE2EDuration="14.018002687s" podCreationTimestamp="2025-12-02 19:02:45 +0000 UTC" firstStartedPulling="2025-12-02 19:02:46.220002119 +0000 UTC m=+1596.992894437" lastFinishedPulling="2025-12-02 19:02:58.379765551 +0000 UTC m=+1609.152657889" observedRunningTime="2025-12-02 19:02:59.004804614 +0000 UTC m=+1609.777696942" watchObservedRunningTime="2025-12-02 19:02:59.018002687 +0000 UTC m=+1609.790895015" 
Dec 02 19:03:01 crc kubenswrapper[4792]: I1202 19:03:01.540752 4792 scope.go:117] "RemoveContainer" containerID="1bd3680d8cabbe09313bc8e9bfcfc7ae4c8aef120bcf8851a5536ed5b3144523"
Dec 02 19:03:01 crc kubenswrapper[4792]: E1202 19:03:01.541546 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f"
Dec 02 19:03:03 crc kubenswrapper[4792]: I1202 19:03:03.688809 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0"
Dec 02 19:03:09 crc kubenswrapper[4792]: I1202 19:03:09.094661 4792 generic.go:334] "Generic (PLEG): container finished" podID="a93efefe-ec0a-45dc-8276-6d99cb2b4db8" containerID="9fcf9b2312fffafb6c40b9818dd9d309564b463e08d97c65f4d4de24870d73b5" exitCode=0
Dec 02 19:03:09 crc kubenswrapper[4792]: I1202 19:03:09.094679 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-khrbn" event={"ID":"a93efefe-ec0a-45dc-8276-6d99cb2b4db8","Type":"ContainerDied","Data":"9fcf9b2312fffafb6c40b9818dd9d309564b463e08d97c65f4d4de24870d73b5"}
Dec 02 19:03:10 crc kubenswrapper[4792]: I1202 19:03:10.592777 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-khrbn"
Dec 02 19:03:10 crc kubenswrapper[4792]: I1202 19:03:10.615863 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a93efefe-ec0a-45dc-8276-6d99cb2b4db8-repo-setup-combined-ca-bundle\") pod \"a93efefe-ec0a-45dc-8276-6d99cb2b4db8\" (UID: \"a93efefe-ec0a-45dc-8276-6d99cb2b4db8\") "
Dec 02 19:03:10 crc kubenswrapper[4792]: I1202 19:03:10.615910 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a93efefe-ec0a-45dc-8276-6d99cb2b4db8-inventory\") pod \"a93efefe-ec0a-45dc-8276-6d99cb2b4db8\" (UID: \"a93efefe-ec0a-45dc-8276-6d99cb2b4db8\") "
Dec 02 19:03:10 crc kubenswrapper[4792]: I1202 19:03:10.616181 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a93efefe-ec0a-45dc-8276-6d99cb2b4db8-ssh-key\") pod \"a93efefe-ec0a-45dc-8276-6d99cb2b4db8\" (UID: \"a93efefe-ec0a-45dc-8276-6d99cb2b4db8\") "
Dec 02 19:03:10 crc kubenswrapper[4792]: I1202 19:03:10.616217 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sd4gz\" (UniqueName: \"kubernetes.io/projected/a93efefe-ec0a-45dc-8276-6d99cb2b4db8-kube-api-access-sd4gz\") pod \"a93efefe-ec0a-45dc-8276-6d99cb2b4db8\" (UID: \"a93efefe-ec0a-45dc-8276-6d99cb2b4db8\") "
Dec 02 19:03:10 crc kubenswrapper[4792]: I1202 19:03:10.623819 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a93efefe-ec0a-45dc-8276-6d99cb2b4db8-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "a93efefe-ec0a-45dc-8276-6d99cb2b4db8" (UID: "a93efefe-ec0a-45dc-8276-6d99cb2b4db8"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 19:03:10 crc kubenswrapper[4792]: I1202 19:03:10.623816 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a93efefe-ec0a-45dc-8276-6d99cb2b4db8-kube-api-access-sd4gz" (OuterVolumeSpecName: "kube-api-access-sd4gz") pod "a93efefe-ec0a-45dc-8276-6d99cb2b4db8" (UID: "a93efefe-ec0a-45dc-8276-6d99cb2b4db8"). InnerVolumeSpecName "kube-api-access-sd4gz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 19:03:10 crc kubenswrapper[4792]: I1202 19:03:10.645459 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a93efefe-ec0a-45dc-8276-6d99cb2b4db8-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a93efefe-ec0a-45dc-8276-6d99cb2b4db8" (UID: "a93efefe-ec0a-45dc-8276-6d99cb2b4db8"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 19:03:10 crc kubenswrapper[4792]: I1202 19:03:10.673200 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a93efefe-ec0a-45dc-8276-6d99cb2b4db8-inventory" (OuterVolumeSpecName: "inventory") pod "a93efefe-ec0a-45dc-8276-6d99cb2b4db8" (UID: "a93efefe-ec0a-45dc-8276-6d99cb2b4db8"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 19:03:10 crc kubenswrapper[4792]: I1202 19:03:10.718497 4792 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a93efefe-ec0a-45dc-8276-6d99cb2b4db8-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 02 19:03:10 crc kubenswrapper[4792]: I1202 19:03:10.718560 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sd4gz\" (UniqueName: \"kubernetes.io/projected/a93efefe-ec0a-45dc-8276-6d99cb2b4db8-kube-api-access-sd4gz\") on node \"crc\" DevicePath \"\""
Dec 02 19:03:10 crc kubenswrapper[4792]: I1202 19:03:10.718591 4792 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a93efefe-ec0a-45dc-8276-6d99cb2b4db8-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 02 19:03:10 crc kubenswrapper[4792]: I1202 19:03:10.718612 4792 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a93efefe-ec0a-45dc-8276-6d99cb2b4db8-inventory\") on node \"crc\" DevicePath \"\""
Dec 02 19:03:11 crc kubenswrapper[4792]: I1202 19:03:11.123577 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-khrbn" event={"ID":"a93efefe-ec0a-45dc-8276-6d99cb2b4db8","Type":"ContainerDied","Data":"8b8b6f99797f5dc70e11017f7a958125be593e1c4058102a535be8658b4587a7"}
Dec 02 19:03:11 crc kubenswrapper[4792]: I1202 19:03:11.123616 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8b8b6f99797f5dc70e11017f7a958125be593e1c4058102a535be8658b4587a7"
Dec 02 19:03:11 crc kubenswrapper[4792]: I1202 19:03:11.123719 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-khrbn"
Dec 02 19:03:11 crc kubenswrapper[4792]: I1202 19:03:11.224357 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-6cgm9"]
Dec 02 19:03:11 crc kubenswrapper[4792]: E1202 19:03:11.224975 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a93efefe-ec0a-45dc-8276-6d99cb2b4db8" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam"
Dec 02 19:03:11 crc kubenswrapper[4792]: I1202 19:03:11.225002 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="a93efefe-ec0a-45dc-8276-6d99cb2b4db8" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam"
Dec 02 19:03:11 crc kubenswrapper[4792]: I1202 19:03:11.225229 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="a93efefe-ec0a-45dc-8276-6d99cb2b4db8" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam"
Dec 02 19:03:11 crc kubenswrapper[4792]: I1202 19:03:11.225924 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-6cgm9"
Dec 02 19:03:11 crc kubenswrapper[4792]: I1202 19:03:11.236789 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-6cgm9"]
Dec 02 19:03:11 crc kubenswrapper[4792]: I1202 19:03:11.267864 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 02 19:03:11 crc kubenswrapper[4792]: I1202 19:03:11.267949 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zln7c"
Dec 02 19:03:11 crc kubenswrapper[4792]: I1202 19:03:11.267976 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 02 19:03:11 crc kubenswrapper[4792]: I1202 19:03:11.268023 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 02 19:03:11 crc kubenswrapper[4792]: I1202 19:03:11.329627 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8133173a-74f1-44d0-ab0d-609e15a2754a-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-6cgm9\" (UID: \"8133173a-74f1-44d0-ab0d-609e15a2754a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-6cgm9"
Dec 02 19:03:11 crc kubenswrapper[4792]: I1202 19:03:11.329704 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dpwtz\" (UniqueName: \"kubernetes.io/projected/8133173a-74f1-44d0-ab0d-609e15a2754a-kube-api-access-dpwtz\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-6cgm9\" (UID: \"8133173a-74f1-44d0-ab0d-609e15a2754a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-6cgm9"
Dec 02 19:03:11 crc kubenswrapper[4792]: I1202 19:03:11.329818 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8133173a-74f1-44d0-ab0d-609e15a2754a-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-6cgm9\" (UID: \"8133173a-74f1-44d0-ab0d-609e15a2754a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-6cgm9"
Dec 02 19:03:11 crc kubenswrapper[4792]: I1202 19:03:11.432241 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8133173a-74f1-44d0-ab0d-609e15a2754a-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-6cgm9\" (UID: \"8133173a-74f1-44d0-ab0d-609e15a2754a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-6cgm9"
\"inventory\" (UniqueName: \"kubernetes.io/secret/8133173a-74f1-44d0-ab0d-609e15a2754a-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-6cgm9\" (UID: \"8133173a-74f1-44d0-ab0d-609e15a2754a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-6cgm9" Dec 02 19:03:11 crc kubenswrapper[4792]: I1202 19:03:11.432516 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8133173a-74f1-44d0-ab0d-609e15a2754a-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-6cgm9\" (UID: \"8133173a-74f1-44d0-ab0d-609e15a2754a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-6cgm9" Dec 02 19:03:11 crc kubenswrapper[4792]: I1202 19:03:11.432630 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dpwtz\" (UniqueName: \"kubernetes.io/projected/8133173a-74f1-44d0-ab0d-609e15a2754a-kube-api-access-dpwtz\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-6cgm9\" (UID: \"8133173a-74f1-44d0-ab0d-609e15a2754a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-6cgm9" Dec 02 19:03:11 crc kubenswrapper[4792]: I1202 19:03:11.437783 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8133173a-74f1-44d0-ab0d-609e15a2754a-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-6cgm9\" (UID: \"8133173a-74f1-44d0-ab0d-609e15a2754a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-6cgm9" Dec 02 19:03:11 crc kubenswrapper[4792]: I1202 19:03:11.438285 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8133173a-74f1-44d0-ab0d-609e15a2754a-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-6cgm9\" (UID: \"8133173a-74f1-44d0-ab0d-609e15a2754a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-6cgm9" Dec 02 19:03:11 crc kubenswrapper[4792]: I1202 19:03:11.453187 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dpwtz\" (UniqueName: \"kubernetes.io/projected/8133173a-74f1-44d0-ab0d-609e15a2754a-kube-api-access-dpwtz\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-6cgm9\" (UID: \"8133173a-74f1-44d0-ab0d-609e15a2754a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-6cgm9" Dec 02 19:03:11 crc kubenswrapper[4792]: I1202 19:03:11.606509 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-6cgm9" Dec 02 19:03:12 crc kubenswrapper[4792]: I1202 19:03:12.225377 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-6cgm9"] Dec 02 19:03:12 crc kubenswrapper[4792]: W1202 19:03:12.230024 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8133173a_74f1_44d0_ab0d_609e15a2754a.slice/crio-5893a13b72c25f04e409dcee0795763ac8aa1a5a655a14852791878e3ee5b7a7 WatchSource:0}: Error finding container 5893a13b72c25f04e409dcee0795763ac8aa1a5a655a14852791878e3ee5b7a7: Status 404 returned error can't find the container with id 5893a13b72c25f04e409dcee0795763ac8aa1a5a655a14852791878e3ee5b7a7 Dec 02 19:03:13 crc kubenswrapper[4792]: I1202 19:03:13.152081 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-6cgm9" event={"ID":"8133173a-74f1-44d0-ab0d-609e15a2754a","Type":"ContainerStarted","Data":"1d890d9fc8d3476e58395db7576cf19dc69e8272c816fa45035617fc51e75e85"} Dec 02 19:03:13 crc kubenswrapper[4792]: I1202 19:03:13.152904 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-6cgm9" event={"ID":"8133173a-74f1-44d0-ab0d-609e15a2754a","Type":"ContainerStarted","Data":"5893a13b72c25f04e409dcee0795763ac8aa1a5a655a14852791878e3ee5b7a7"} Dec 02 19:03:13 crc kubenswrapper[4792]: I1202 19:03:13.169501 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-6cgm9" podStartSLOduration=1.647720605 podStartE2EDuration="2.169481648s" podCreationTimestamp="2025-12-02 19:03:11 +0000 UTC" firstStartedPulling="2025-12-02 19:03:12.235842571 +0000 UTC m=+1623.008734939" lastFinishedPulling="2025-12-02 19:03:12.757603644 +0000 UTC m=+1623.530495982" observedRunningTime="2025-12-02 19:03:13.168548324 +0000 UTC m=+1623.941440662" watchObservedRunningTime="2025-12-02 19:03:13.169481648 +0000 UTC m=+1623.942373996" Dec 02 19:03:14 crc kubenswrapper[4792]: I1202 19:03:14.539478 4792 scope.go:117] "RemoveContainer" containerID="1bd3680d8cabbe09313bc8e9bfcfc7ae4c8aef120bcf8851a5536ed5b3144523" Dec 02 19:03:14 crc kubenswrapper[4792]: E1202 19:03:14.540072 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:03:14 crc kubenswrapper[4792]: I1202 19:03:14.752860 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 02 19:03:14 crc kubenswrapper[4792]: I1202 19:03:14.859411 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-api-0" Dec 02 19:03:15 crc kubenswrapper[4792]: I1202 19:03:15.591460 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-crmpd"] Dec 02 19:03:15 crc kubenswrapper[4792]: I1202 19:03:15.595270 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-crmpd"] Dec 02 19:03:15 crc kubenswrapper[4792]: I1202 19:03:15.595409 4792 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-crmpd" Dec 02 19:03:15 crc kubenswrapper[4792]: I1202 19:03:15.632997 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30d956ed-b602-4760-a856-6e47c4314495-utilities\") pod \"certified-operators-crmpd\" (UID: \"30d956ed-b602-4760-a856-6e47c4314495\") " pod="openshift-marketplace/certified-operators-crmpd" Dec 02 19:03:15 crc kubenswrapper[4792]: I1202 19:03:15.633047 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30d956ed-b602-4760-a856-6e47c4314495-catalog-content\") pod \"certified-operators-crmpd\" (UID: \"30d956ed-b602-4760-a856-6e47c4314495\") " pod="openshift-marketplace/certified-operators-crmpd" Dec 02 19:03:15 crc kubenswrapper[4792]: I1202 19:03:15.633123 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzngb\" (UniqueName: \"kubernetes.io/projected/30d956ed-b602-4760-a856-6e47c4314495-kube-api-access-kzngb\") pod \"certified-operators-crmpd\" (UID: \"30d956ed-b602-4760-a856-6e47c4314495\") " pod="openshift-marketplace/certified-operators-crmpd" Dec 02 19:03:15 crc kubenswrapper[4792]: I1202 19:03:15.735353 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30d956ed-b602-4760-a856-6e47c4314495-utilities\") pod \"certified-operators-crmpd\" (UID: \"30d956ed-b602-4760-a856-6e47c4314495\") " pod="openshift-marketplace/certified-operators-crmpd" Dec 02 19:03:15 crc kubenswrapper[4792]: I1202 19:03:15.735419 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30d956ed-b602-4760-a856-6e47c4314495-catalog-content\") pod \"certified-operators-crmpd\" (UID: \"30d956ed-b602-4760-a856-6e47c4314495\") " pod="openshift-marketplace/certified-operators-crmpd" Dec 02 19:03:15 crc kubenswrapper[4792]: I1202 19:03:15.735497 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzngb\" (UniqueName: \"kubernetes.io/projected/30d956ed-b602-4760-a856-6e47c4314495-kube-api-access-kzngb\") pod \"certified-operators-crmpd\" (UID: \"30d956ed-b602-4760-a856-6e47c4314495\") " pod="openshift-marketplace/certified-operators-crmpd" Dec 02 19:03:15 crc kubenswrapper[4792]: I1202 19:03:15.736007 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30d956ed-b602-4760-a856-6e47c4314495-utilities\") pod \"certified-operators-crmpd\" (UID: \"30d956ed-b602-4760-a856-6e47c4314495\") " pod="openshift-marketplace/certified-operators-crmpd" Dec 02 19:03:15 crc kubenswrapper[4792]: I1202 19:03:15.736028 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30d956ed-b602-4760-a856-6e47c4314495-catalog-content\") pod \"certified-operators-crmpd\" (UID: \"30d956ed-b602-4760-a856-6e47c4314495\") " pod="openshift-marketplace/certified-operators-crmpd" Dec 02 19:03:15 crc kubenswrapper[4792]: I1202 19:03:15.760593 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kzngb\" (UniqueName: 
\"kubernetes.io/projected/30d956ed-b602-4760-a856-6e47c4314495-kube-api-access-kzngb\") pod \"certified-operators-crmpd\" (UID: \"30d956ed-b602-4760-a856-6e47c4314495\") " pod="openshift-marketplace/certified-operators-crmpd" Dec 02 19:03:15 crc kubenswrapper[4792]: I1202 19:03:15.927224 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-crmpd" Dec 02 19:03:16 crc kubenswrapper[4792]: I1202 19:03:16.190446 4792 generic.go:334] "Generic (PLEG): container finished" podID="8133173a-74f1-44d0-ab0d-609e15a2754a" containerID="1d890d9fc8d3476e58395db7576cf19dc69e8272c816fa45035617fc51e75e85" exitCode=0 Dec 02 19:03:16 crc kubenswrapper[4792]: I1202 19:03:16.190480 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-6cgm9" event={"ID":"8133173a-74f1-44d0-ab0d-609e15a2754a","Type":"ContainerDied","Data":"1d890d9fc8d3476e58395db7576cf19dc69e8272c816fa45035617fc51e75e85"} Dec 02 19:03:16 crc kubenswrapper[4792]: I1202 19:03:16.423225 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-crmpd"] Dec 02 19:03:17 crc kubenswrapper[4792]: I1202 19:03:17.206771 4792 generic.go:334] "Generic (PLEG): container finished" podID="30d956ed-b602-4760-a856-6e47c4314495" containerID="2c66744be2a697f4b5f80001f21564e0a7da55c284cf1f57981171af6da3f803" exitCode=0 Dec 02 19:03:17 crc kubenswrapper[4792]: I1202 19:03:17.206860 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-crmpd" event={"ID":"30d956ed-b602-4760-a856-6e47c4314495","Type":"ContainerDied","Data":"2c66744be2a697f4b5f80001f21564e0a7da55c284cf1f57981171af6da3f803"} Dec 02 19:03:17 crc kubenswrapper[4792]: I1202 19:03:17.207105 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-crmpd" event={"ID":"30d956ed-b602-4760-a856-6e47c4314495","Type":"ContainerStarted","Data":"429e69f9dec1fcb7c5815ccb4cc222560de0d39eb2bcf86d65d49d4afa8310f4"} Dec 02 19:03:17 crc kubenswrapper[4792]: I1202 19:03:17.669217 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-6cgm9" Dec 02 19:03:17 crc kubenswrapper[4792]: I1202 19:03:17.692630 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8133173a-74f1-44d0-ab0d-609e15a2754a-inventory\") pod \"8133173a-74f1-44d0-ab0d-609e15a2754a\" (UID: \"8133173a-74f1-44d0-ab0d-609e15a2754a\") " Dec 02 19:03:17 crc kubenswrapper[4792]: I1202 19:03:17.692774 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dpwtz\" (UniqueName: \"kubernetes.io/projected/8133173a-74f1-44d0-ab0d-609e15a2754a-kube-api-access-dpwtz\") pod \"8133173a-74f1-44d0-ab0d-609e15a2754a\" (UID: \"8133173a-74f1-44d0-ab0d-609e15a2754a\") " Dec 02 19:03:17 crc kubenswrapper[4792]: I1202 19:03:17.692831 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8133173a-74f1-44d0-ab0d-609e15a2754a-ssh-key\") pod \"8133173a-74f1-44d0-ab0d-609e15a2754a\" (UID: \"8133173a-74f1-44d0-ab0d-609e15a2754a\") " Dec 02 19:03:17 crc kubenswrapper[4792]: I1202 19:03:17.698880 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8133173a-74f1-44d0-ab0d-609e15a2754a-kube-api-access-dpwtz" (OuterVolumeSpecName: "kube-api-access-dpwtz") pod "8133173a-74f1-44d0-ab0d-609e15a2754a" (UID: "8133173a-74f1-44d0-ab0d-609e15a2754a"). InnerVolumeSpecName "kube-api-access-dpwtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:03:17 crc kubenswrapper[4792]: I1202 19:03:17.725674 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8133173a-74f1-44d0-ab0d-609e15a2754a-inventory" (OuterVolumeSpecName: "inventory") pod "8133173a-74f1-44d0-ab0d-609e15a2754a" (UID: "8133173a-74f1-44d0-ab0d-609e15a2754a"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:03:17 crc kubenswrapper[4792]: I1202 19:03:17.732800 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8133173a-74f1-44d0-ab0d-609e15a2754a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8133173a-74f1-44d0-ab0d-609e15a2754a" (UID: "8133173a-74f1-44d0-ab0d-609e15a2754a"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:03:17 crc kubenswrapper[4792]: I1202 19:03:17.797258 4792 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8133173a-74f1-44d0-ab0d-609e15a2754a-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 19:03:17 crc kubenswrapper[4792]: I1202 19:03:17.797287 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dpwtz\" (UniqueName: \"kubernetes.io/projected/8133173a-74f1-44d0-ab0d-609e15a2754a-kube-api-access-dpwtz\") on node \"crc\" DevicePath \"\"" Dec 02 19:03:17 crc kubenswrapper[4792]: I1202 19:03:17.797298 4792 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8133173a-74f1-44d0-ab0d-609e15a2754a-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 19:03:18 crc kubenswrapper[4792]: I1202 19:03:18.221968 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-6cgm9" event={"ID":"8133173a-74f1-44d0-ab0d-609e15a2754a","Type":"ContainerDied","Data":"5893a13b72c25f04e409dcee0795763ac8aa1a5a655a14852791878e3ee5b7a7"} Dec 02 19:03:18 crc kubenswrapper[4792]: I1202 19:03:18.222005 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5893a13b72c25f04e409dcee0795763ac8aa1a5a655a14852791878e3ee5b7a7" Dec 02 19:03:18 crc kubenswrapper[4792]: I1202 19:03:18.222028 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-6cgm9" Dec 02 19:03:18 crc kubenswrapper[4792]: I1202 19:03:18.304529 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2bppw"] Dec 02 19:03:18 crc kubenswrapper[4792]: E1202 19:03:18.304952 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8133173a-74f1-44d0-ab0d-609e15a2754a" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 02 19:03:18 crc kubenswrapper[4792]: I1202 19:03:18.304969 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="8133173a-74f1-44d0-ab0d-609e15a2754a" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 02 19:03:18 crc kubenswrapper[4792]: I1202 19:03:18.305161 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="8133173a-74f1-44d0-ab0d-609e15a2754a" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 02 19:03:18 crc kubenswrapper[4792]: I1202 19:03:18.305984 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2bppw" Dec 02 19:03:18 crc kubenswrapper[4792]: I1202 19:03:18.309094 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 19:03:18 crc kubenswrapper[4792]: I1202 19:03:18.311696 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 19:03:18 crc kubenswrapper[4792]: I1202 19:03:18.311872 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zln7c" Dec 02 19:03:18 crc kubenswrapper[4792]: I1202 19:03:18.312020 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 19:03:18 crc kubenswrapper[4792]: I1202 19:03:18.324331 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2bppw"] Dec 02 19:03:18 crc kubenswrapper[4792]: I1202 19:03:18.409672 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fz4tc\" (UniqueName: \"kubernetes.io/projected/e29a810b-8a51-4d2c-ab9e-61315499b272-kube-api-access-fz4tc\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-2bppw\" (UID: \"e29a810b-8a51-4d2c-ab9e-61315499b272\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2bppw" Dec 02 19:03:18 crc kubenswrapper[4792]: I1202 19:03:18.409885 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e29a810b-8a51-4d2c-ab9e-61315499b272-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-2bppw\" (UID: \"e29a810b-8a51-4d2c-ab9e-61315499b272\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2bppw" Dec 02 19:03:18 crc kubenswrapper[4792]: I1202 19:03:18.410037 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e29a810b-8a51-4d2c-ab9e-61315499b272-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-2bppw\" (UID: \"e29a810b-8a51-4d2c-ab9e-61315499b272\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2bppw" Dec 02 19:03:18 crc kubenswrapper[4792]: I1202 19:03:18.410121 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e29a810b-8a51-4d2c-ab9e-61315499b272-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-2bppw\" (UID: \"e29a810b-8a51-4d2c-ab9e-61315499b272\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2bppw" Dec 02 19:03:18 crc kubenswrapper[4792]: I1202 19:03:18.511923 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e29a810b-8a51-4d2c-ab9e-61315499b272-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-2bppw\" (UID: \"e29a810b-8a51-4d2c-ab9e-61315499b272\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2bppw" Dec 02 19:03:18 crc kubenswrapper[4792]: I1202 19:03:18.511998 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e29a810b-8a51-4d2c-ab9e-61315499b272-ssh-key\") pod 
\"bootstrap-edpm-deployment-openstack-edpm-ipam-2bppw\" (UID: \"e29a810b-8a51-4d2c-ab9e-61315499b272\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2bppw" Dec 02 19:03:18 crc kubenswrapper[4792]: I1202 19:03:18.512063 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fz4tc\" (UniqueName: \"kubernetes.io/projected/e29a810b-8a51-4d2c-ab9e-61315499b272-kube-api-access-fz4tc\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-2bppw\" (UID: \"e29a810b-8a51-4d2c-ab9e-61315499b272\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2bppw" Dec 02 19:03:18 crc kubenswrapper[4792]: I1202 19:03:18.512131 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e29a810b-8a51-4d2c-ab9e-61315499b272-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-2bppw\" (UID: \"e29a810b-8a51-4d2c-ab9e-61315499b272\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2bppw" Dec 02 19:03:18 crc kubenswrapper[4792]: I1202 19:03:18.518945 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e29a810b-8a51-4d2c-ab9e-61315499b272-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-2bppw\" (UID: \"e29a810b-8a51-4d2c-ab9e-61315499b272\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2bppw" Dec 02 19:03:18 crc kubenswrapper[4792]: I1202 19:03:18.524143 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e29a810b-8a51-4d2c-ab9e-61315499b272-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-2bppw\" (UID: \"e29a810b-8a51-4d2c-ab9e-61315499b272\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2bppw" Dec 02 19:03:18 crc kubenswrapper[4792]: I1202 19:03:18.533280 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e29a810b-8a51-4d2c-ab9e-61315499b272-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-2bppw\" (UID: \"e29a810b-8a51-4d2c-ab9e-61315499b272\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2bppw" Dec 02 19:03:18 crc kubenswrapper[4792]: I1202 19:03:18.546658 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fz4tc\" (UniqueName: \"kubernetes.io/projected/e29a810b-8a51-4d2c-ab9e-61315499b272-kube-api-access-fz4tc\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-2bppw\" (UID: \"e29a810b-8a51-4d2c-ab9e-61315499b272\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2bppw" Dec 02 19:03:18 crc kubenswrapper[4792]: I1202 19:03:18.672600 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2bppw" Dec 02 19:03:19 crc kubenswrapper[4792]: I1202 19:03:19.212262 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2bppw"] Dec 02 19:03:20 crc kubenswrapper[4792]: I1202 19:03:20.245138 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2bppw" event={"ID":"e29a810b-8a51-4d2c-ab9e-61315499b272","Type":"ContainerStarted","Data":"438a4083ba3a888cf2936a6b451db6719db9f7e5badacc9ad996220efc491142"} Dec 02 19:03:21 crc kubenswrapper[4792]: I1202 19:03:21.273124 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-crmpd" event={"ID":"30d956ed-b602-4760-a856-6e47c4314495","Type":"ContainerStarted","Data":"85f4d1634f54991d6126a22b65862a361075ea077143b124f2b5264f0b81bacc"} Dec 02 19:03:22 crc kubenswrapper[4792]: I1202 19:03:22.285262 4792 generic.go:334] "Generic (PLEG): container finished" podID="30d956ed-b602-4760-a856-6e47c4314495" containerID="85f4d1634f54991d6126a22b65862a361075ea077143b124f2b5264f0b81bacc" exitCode=0 Dec 02 19:03:22 crc kubenswrapper[4792]: I1202 19:03:22.285340 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-crmpd" event={"ID":"30d956ed-b602-4760-a856-6e47c4314495","Type":"ContainerDied","Data":"85f4d1634f54991d6126a22b65862a361075ea077143b124f2b5264f0b81bacc"} Dec 02 19:03:24 crc kubenswrapper[4792]: I1202 19:03:24.314349 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2bppw" event={"ID":"e29a810b-8a51-4d2c-ab9e-61315499b272","Type":"ContainerStarted","Data":"fa8662c93df49da6493b15490dedef80df76d083ca291fc327a50cc452f44bd7"} Dec 02 19:03:24 crc kubenswrapper[4792]: I1202 19:03:24.352608 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2bppw" podStartSLOduration=3.4950962150000002 podStartE2EDuration="6.352586276s" podCreationTimestamp="2025-12-02 19:03:18 +0000 UTC" firstStartedPulling="2025-12-02 19:03:19.228187095 +0000 UTC m=+1630.001079423" lastFinishedPulling="2025-12-02 19:03:22.085677156 +0000 UTC m=+1632.858569484" observedRunningTime="2025-12-02 19:03:24.339106555 +0000 UTC m=+1635.111998883" watchObservedRunningTime="2025-12-02 19:03:24.352586276 +0000 UTC m=+1635.125478624" Dec 02 19:03:25 crc kubenswrapper[4792]: I1202 19:03:25.539554 4792 scope.go:117] "RemoveContainer" containerID="1bd3680d8cabbe09313bc8e9bfcfc7ae4c8aef120bcf8851a5536ed5b3144523" Dec 02 19:03:25 crc kubenswrapper[4792]: E1202 19:03:25.540014 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:03:26 crc kubenswrapper[4792]: I1202 19:03:26.335377 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-crmpd" event={"ID":"30d956ed-b602-4760-a856-6e47c4314495","Type":"ContainerStarted","Data":"ef3d9fc927237a2651c2b47bdef09e7d9714c2606cb66224a726b0bae300e5be"} Dec 02 19:03:26 crc kubenswrapper[4792]: 
I1202 19:03:26.368556 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-crmpd" podStartSLOduration=3.563034368 podStartE2EDuration="11.368507694s" podCreationTimestamp="2025-12-02 19:03:15 +0000 UTC" firstStartedPulling="2025-12-02 19:03:17.21041606 +0000 UTC m=+1627.983308408" lastFinishedPulling="2025-12-02 19:03:25.015889396 +0000 UTC m=+1635.788781734" observedRunningTime="2025-12-02 19:03:26.358508693 +0000 UTC m=+1637.131401021" watchObservedRunningTime="2025-12-02 19:03:26.368507694 +0000 UTC m=+1637.141400052" Dec 02 19:03:33 crc kubenswrapper[4792]: I1202 19:03:33.514630 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-khxqd"] Dec 02 19:03:33 crc kubenswrapper[4792]: I1202 19:03:33.517715 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-khxqd" Dec 02 19:03:33 crc kubenswrapper[4792]: I1202 19:03:33.532785 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-khxqd"] Dec 02 19:03:33 crc kubenswrapper[4792]: I1202 19:03:33.563704 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cbbf6bf-22e8-49ab-affd-8921f0d8c60a-utilities\") pod \"redhat-marketplace-khxqd\" (UID: \"9cbbf6bf-22e8-49ab-affd-8921f0d8c60a\") " pod="openshift-marketplace/redhat-marketplace-khxqd" Dec 02 19:03:33 crc kubenswrapper[4792]: I1202 19:03:33.563788 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zfq87\" (UniqueName: \"kubernetes.io/projected/9cbbf6bf-22e8-49ab-affd-8921f0d8c60a-kube-api-access-zfq87\") pod \"redhat-marketplace-khxqd\" (UID: \"9cbbf6bf-22e8-49ab-affd-8921f0d8c60a\") " pod="openshift-marketplace/redhat-marketplace-khxqd" Dec 02 19:03:33 crc kubenswrapper[4792]: I1202 19:03:33.563985 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cbbf6bf-22e8-49ab-affd-8921f0d8c60a-catalog-content\") pod \"redhat-marketplace-khxqd\" (UID: \"9cbbf6bf-22e8-49ab-affd-8921f0d8c60a\") " pod="openshift-marketplace/redhat-marketplace-khxqd" Dec 02 19:03:33 crc kubenswrapper[4792]: I1202 19:03:33.666381 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cbbf6bf-22e8-49ab-affd-8921f0d8c60a-catalog-content\") pod \"redhat-marketplace-khxqd\" (UID: \"9cbbf6bf-22e8-49ab-affd-8921f0d8c60a\") " pod="openshift-marketplace/redhat-marketplace-khxqd" Dec 02 19:03:33 crc kubenswrapper[4792]: I1202 19:03:33.666476 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cbbf6bf-22e8-49ab-affd-8921f0d8c60a-utilities\") pod \"redhat-marketplace-khxqd\" (UID: \"9cbbf6bf-22e8-49ab-affd-8921f0d8c60a\") " pod="openshift-marketplace/redhat-marketplace-khxqd" Dec 02 19:03:33 crc kubenswrapper[4792]: I1202 19:03:33.666515 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zfq87\" (UniqueName: \"kubernetes.io/projected/9cbbf6bf-22e8-49ab-affd-8921f0d8c60a-kube-api-access-zfq87\") pod \"redhat-marketplace-khxqd\" (UID: \"9cbbf6bf-22e8-49ab-affd-8921f0d8c60a\") " pod="openshift-marketplace/redhat-marketplace-khxqd" 
Dec 02 19:03:33 crc kubenswrapper[4792]: I1202 19:03:33.666944 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cbbf6bf-22e8-49ab-affd-8921f0d8c60a-catalog-content\") pod \"redhat-marketplace-khxqd\" (UID: \"9cbbf6bf-22e8-49ab-affd-8921f0d8c60a\") " pod="openshift-marketplace/redhat-marketplace-khxqd" Dec 02 19:03:33 crc kubenswrapper[4792]: I1202 19:03:33.667048 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cbbf6bf-22e8-49ab-affd-8921f0d8c60a-utilities\") pod \"redhat-marketplace-khxqd\" (UID: \"9cbbf6bf-22e8-49ab-affd-8921f0d8c60a\") " pod="openshift-marketplace/redhat-marketplace-khxqd" Dec 02 19:03:33 crc kubenswrapper[4792]: I1202 19:03:33.697971 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zfq87\" (UniqueName: \"kubernetes.io/projected/9cbbf6bf-22e8-49ab-affd-8921f0d8c60a-kube-api-access-zfq87\") pod \"redhat-marketplace-khxqd\" (UID: \"9cbbf6bf-22e8-49ab-affd-8921f0d8c60a\") " pod="openshift-marketplace/redhat-marketplace-khxqd" Dec 02 19:03:33 crc kubenswrapper[4792]: I1202 19:03:33.847566 4792 scope.go:117] "RemoveContainer" containerID="f1b9ba7f0083bba8aef8290a87652c513046541054a83ecdeb018a9f4b84aac1" Dec 02 19:03:33 crc kubenswrapper[4792]: I1202 19:03:33.859089 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-khxqd" Dec 02 19:03:33 crc kubenswrapper[4792]: I1202 19:03:33.875085 4792 scope.go:117] "RemoveContainer" containerID="28a6e102bd8aca4f2435f803b9316007e7b6db5356fc8621db7b90eb4d633390" Dec 02 19:03:34 crc kubenswrapper[4792]: I1202 19:03:34.020681 4792 scope.go:117] "RemoveContainer" containerID="194c00decb1cdaa61c023c7bd7f2a15f6d3e6afc03290996bed646e5d9f93467" Dec 02 19:03:34 crc kubenswrapper[4792]: I1202 19:03:34.069173 4792 scope.go:117] "RemoveContainer" containerID="bcdb42e0a9fdd644353747906bb3bb9cd22fa5854c75a7389e00d2099fb21b87" Dec 02 19:03:34 crc kubenswrapper[4792]: I1202 19:03:34.462607 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-khxqd"] Dec 02 19:03:35 crc kubenswrapper[4792]: I1202 19:03:35.455789 4792 generic.go:334] "Generic (PLEG): container finished" podID="9cbbf6bf-22e8-49ab-affd-8921f0d8c60a" containerID="90d4c810d2cdb9090702d67d6a32562422a9d400118ce8669e7f10d5ae9675ae" exitCode=0 Dec 02 19:03:35 crc kubenswrapper[4792]: I1202 19:03:35.455846 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-khxqd" event={"ID":"9cbbf6bf-22e8-49ab-affd-8921f0d8c60a","Type":"ContainerDied","Data":"90d4c810d2cdb9090702d67d6a32562422a9d400118ce8669e7f10d5ae9675ae"} Dec 02 19:03:35 crc kubenswrapper[4792]: I1202 19:03:35.456384 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-khxqd" event={"ID":"9cbbf6bf-22e8-49ab-affd-8921f0d8c60a","Type":"ContainerStarted","Data":"e3c98f3afee29a2e264def2e9fc1ee1c6687ae2771f64ef5a526ebbc21b1f265"} Dec 02 19:03:35 crc kubenswrapper[4792]: I1202 19:03:35.460160 4792 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 19:03:35 crc kubenswrapper[4792]: I1202 19:03:35.928941 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-crmpd" Dec 02 19:03:35 crc kubenswrapper[4792]: I1202 19:03:35.929253 
4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-crmpd" Dec 02 19:03:36 crc kubenswrapper[4792]: I1202 19:03:36.019557 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-crmpd" Dec 02 19:03:36 crc kubenswrapper[4792]: I1202 19:03:36.468317 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-khxqd" event={"ID":"9cbbf6bf-22e8-49ab-affd-8921f0d8c60a","Type":"ContainerStarted","Data":"12cf2cf07423dbec867d3ef98d472fffaffb0f61f047e9d98d7588efc65d1b89"} Dec 02 19:03:36 crc kubenswrapper[4792]: I1202 19:03:36.518391 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-crmpd" Dec 02 19:03:37 crc kubenswrapper[4792]: I1202 19:03:37.485424 4792 generic.go:334] "Generic (PLEG): container finished" podID="9cbbf6bf-22e8-49ab-affd-8921f0d8c60a" containerID="12cf2cf07423dbec867d3ef98d472fffaffb0f61f047e9d98d7588efc65d1b89" exitCode=0 Dec 02 19:03:37 crc kubenswrapper[4792]: I1202 19:03:37.485470 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-khxqd" event={"ID":"9cbbf6bf-22e8-49ab-affd-8921f0d8c60a","Type":"ContainerDied","Data":"12cf2cf07423dbec867d3ef98d472fffaffb0f61f047e9d98d7588efc65d1b89"} Dec 02 19:03:38 crc kubenswrapper[4792]: I1202 19:03:38.298783 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-crmpd"] Dec 02 19:03:38 crc kubenswrapper[4792]: I1202 19:03:38.496666 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-khxqd" event={"ID":"9cbbf6bf-22e8-49ab-affd-8921f0d8c60a","Type":"ContainerStarted","Data":"0a61e2a1d41b5e866b8669ac371a10cfbbddb9cb205fa149ecba2ec41babeaaf"} Dec 02 19:03:39 crc kubenswrapper[4792]: I1202 19:03:39.508491 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-crmpd" podUID="30d956ed-b602-4760-a856-6e47c4314495" containerName="registry-server" containerID="cri-o://ef3d9fc927237a2651c2b47bdef09e7d9714c2606cb66224a726b0bae300e5be" gracePeriod=2 Dec 02 19:03:39 crc kubenswrapper[4792]: I1202 19:03:39.546332 4792 scope.go:117] "RemoveContainer" containerID="1bd3680d8cabbe09313bc8e9bfcfc7ae4c8aef120bcf8851a5536ed5b3144523" Dec 02 19:03:39 crc kubenswrapper[4792]: E1202 19:03:39.546624 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:03:40 crc kubenswrapper[4792]: I1202 19:03:40.081401 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-crmpd" Dec 02 19:03:40 crc kubenswrapper[4792]: I1202 19:03:40.110452 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-khxqd" podStartSLOduration=4.632421392 podStartE2EDuration="7.110430425s" podCreationTimestamp="2025-12-02 19:03:33 +0000 UTC" firstStartedPulling="2025-12-02 19:03:35.459851755 +0000 UTC m=+1646.232744103" lastFinishedPulling="2025-12-02 19:03:37.937860808 +0000 UTC m=+1648.710753136" observedRunningTime="2025-12-02 19:03:38.517162543 +0000 UTC m=+1649.290054861" watchObservedRunningTime="2025-12-02 19:03:40.110430425 +0000 UTC m=+1650.883322753" Dec 02 19:03:40 crc kubenswrapper[4792]: I1202 19:03:40.126276 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30d956ed-b602-4760-a856-6e47c4314495-utilities\") pod \"30d956ed-b602-4760-a856-6e47c4314495\" (UID: \"30d956ed-b602-4760-a856-6e47c4314495\") " Dec 02 19:03:40 crc kubenswrapper[4792]: I1202 19:03:40.126420 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kzngb\" (UniqueName: \"kubernetes.io/projected/30d956ed-b602-4760-a856-6e47c4314495-kube-api-access-kzngb\") pod \"30d956ed-b602-4760-a856-6e47c4314495\" (UID: \"30d956ed-b602-4760-a856-6e47c4314495\") " Dec 02 19:03:40 crc kubenswrapper[4792]: I1202 19:03:40.126594 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30d956ed-b602-4760-a856-6e47c4314495-catalog-content\") pod \"30d956ed-b602-4760-a856-6e47c4314495\" (UID: \"30d956ed-b602-4760-a856-6e47c4314495\") " Dec 02 19:03:40 crc kubenswrapper[4792]: I1202 19:03:40.127390 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/30d956ed-b602-4760-a856-6e47c4314495-utilities" (OuterVolumeSpecName: "utilities") pod "30d956ed-b602-4760-a856-6e47c4314495" (UID: "30d956ed-b602-4760-a856-6e47c4314495"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:03:40 crc kubenswrapper[4792]: I1202 19:03:40.132641 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30d956ed-b602-4760-a856-6e47c4314495-kube-api-access-kzngb" (OuterVolumeSpecName: "kube-api-access-kzngb") pod "30d956ed-b602-4760-a856-6e47c4314495" (UID: "30d956ed-b602-4760-a856-6e47c4314495"). InnerVolumeSpecName "kube-api-access-kzngb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:03:40 crc kubenswrapper[4792]: I1202 19:03:40.188390 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/30d956ed-b602-4760-a856-6e47c4314495-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "30d956ed-b602-4760-a856-6e47c4314495" (UID: "30d956ed-b602-4760-a856-6e47c4314495"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:03:40 crc kubenswrapper[4792]: I1202 19:03:40.229092 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30d956ed-b602-4760-a856-6e47c4314495-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 19:03:40 crc kubenswrapper[4792]: I1202 19:03:40.229324 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kzngb\" (UniqueName: \"kubernetes.io/projected/30d956ed-b602-4760-a856-6e47c4314495-kube-api-access-kzngb\") on node \"crc\" DevicePath \"\"" Dec 02 19:03:40 crc kubenswrapper[4792]: I1202 19:03:40.229435 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30d956ed-b602-4760-a856-6e47c4314495-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 19:03:40 crc kubenswrapper[4792]: I1202 19:03:40.528314 4792 generic.go:334] "Generic (PLEG): container finished" podID="30d956ed-b602-4760-a856-6e47c4314495" containerID="ef3d9fc927237a2651c2b47bdef09e7d9714c2606cb66224a726b0bae300e5be" exitCode=0 Dec 02 19:03:40 crc kubenswrapper[4792]: I1202 19:03:40.528394 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-crmpd" event={"ID":"30d956ed-b602-4760-a856-6e47c4314495","Type":"ContainerDied","Data":"ef3d9fc927237a2651c2b47bdef09e7d9714c2606cb66224a726b0bae300e5be"} Dec 02 19:03:40 crc kubenswrapper[4792]: I1202 19:03:40.528451 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-crmpd" event={"ID":"30d956ed-b602-4760-a856-6e47c4314495","Type":"ContainerDied","Data":"429e69f9dec1fcb7c5815ccb4cc222560de0d39eb2bcf86d65d49d4afa8310f4"} Dec 02 19:03:40 crc kubenswrapper[4792]: I1202 19:03:40.528489 4792 scope.go:117] "RemoveContainer" containerID="ef3d9fc927237a2651c2b47bdef09e7d9714c2606cb66224a726b0bae300e5be" Dec 02 19:03:40 crc kubenswrapper[4792]: I1202 19:03:40.528404 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-crmpd" Dec 02 19:03:40 crc kubenswrapper[4792]: I1202 19:03:40.603767 4792 scope.go:117] "RemoveContainer" containerID="85f4d1634f54991d6126a22b65862a361075ea077143b124f2b5264f0b81bacc" Dec 02 19:03:40 crc kubenswrapper[4792]: I1202 19:03:40.610122 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-crmpd"] Dec 02 19:03:40 crc kubenswrapper[4792]: I1202 19:03:40.629623 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-crmpd"] Dec 02 19:03:40 crc kubenswrapper[4792]: I1202 19:03:40.637777 4792 scope.go:117] "RemoveContainer" containerID="2c66744be2a697f4b5f80001f21564e0a7da55c284cf1f57981171af6da3f803" Dec 02 19:03:40 crc kubenswrapper[4792]: I1202 19:03:40.700238 4792 scope.go:117] "RemoveContainer" containerID="ef3d9fc927237a2651c2b47bdef09e7d9714c2606cb66224a726b0bae300e5be" Dec 02 19:03:40 crc kubenswrapper[4792]: E1202 19:03:40.700589 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ef3d9fc927237a2651c2b47bdef09e7d9714c2606cb66224a726b0bae300e5be\": container with ID starting with ef3d9fc927237a2651c2b47bdef09e7d9714c2606cb66224a726b0bae300e5be not found: ID does not exist" containerID="ef3d9fc927237a2651c2b47bdef09e7d9714c2606cb66224a726b0bae300e5be" Dec 02 19:03:40 crc kubenswrapper[4792]: I1202 19:03:40.700626 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef3d9fc927237a2651c2b47bdef09e7d9714c2606cb66224a726b0bae300e5be"} err="failed to get container status \"ef3d9fc927237a2651c2b47bdef09e7d9714c2606cb66224a726b0bae300e5be\": rpc error: code = NotFound desc = could not find container \"ef3d9fc927237a2651c2b47bdef09e7d9714c2606cb66224a726b0bae300e5be\": container with ID starting with ef3d9fc927237a2651c2b47bdef09e7d9714c2606cb66224a726b0bae300e5be not found: ID does not exist" Dec 02 19:03:40 crc kubenswrapper[4792]: I1202 19:03:40.700656 4792 scope.go:117] "RemoveContainer" containerID="85f4d1634f54991d6126a22b65862a361075ea077143b124f2b5264f0b81bacc" Dec 02 19:03:40 crc kubenswrapper[4792]: E1202 19:03:40.701011 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"85f4d1634f54991d6126a22b65862a361075ea077143b124f2b5264f0b81bacc\": container with ID starting with 85f4d1634f54991d6126a22b65862a361075ea077143b124f2b5264f0b81bacc not found: ID does not exist" containerID="85f4d1634f54991d6126a22b65862a361075ea077143b124f2b5264f0b81bacc" Dec 02 19:03:40 crc kubenswrapper[4792]: I1202 19:03:40.701039 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85f4d1634f54991d6126a22b65862a361075ea077143b124f2b5264f0b81bacc"} err="failed to get container status \"85f4d1634f54991d6126a22b65862a361075ea077143b124f2b5264f0b81bacc\": rpc error: code = NotFound desc = could not find container \"85f4d1634f54991d6126a22b65862a361075ea077143b124f2b5264f0b81bacc\": container with ID starting with 85f4d1634f54991d6126a22b65862a361075ea077143b124f2b5264f0b81bacc not found: ID does not exist" Dec 02 19:03:40 crc kubenswrapper[4792]: I1202 19:03:40.701058 4792 scope.go:117] "RemoveContainer" containerID="2c66744be2a697f4b5f80001f21564e0a7da55c284cf1f57981171af6da3f803" Dec 02 19:03:40 crc kubenswrapper[4792]: E1202 19:03:40.701442 4792 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"2c66744be2a697f4b5f80001f21564e0a7da55c284cf1f57981171af6da3f803\": container with ID starting with 2c66744be2a697f4b5f80001f21564e0a7da55c284cf1f57981171af6da3f803 not found: ID does not exist" containerID="2c66744be2a697f4b5f80001f21564e0a7da55c284cf1f57981171af6da3f803" Dec 02 19:03:40 crc kubenswrapper[4792]: I1202 19:03:40.701947 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c66744be2a697f4b5f80001f21564e0a7da55c284cf1f57981171af6da3f803"} err="failed to get container status \"2c66744be2a697f4b5f80001f21564e0a7da55c284cf1f57981171af6da3f803\": rpc error: code = NotFound desc = could not find container \"2c66744be2a697f4b5f80001f21564e0a7da55c284cf1f57981171af6da3f803\": container with ID starting with 2c66744be2a697f4b5f80001f21564e0a7da55c284cf1f57981171af6da3f803 not found: ID does not exist" Dec 02 19:03:41 crc kubenswrapper[4792]: I1202 19:03:41.555998 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30d956ed-b602-4760-a856-6e47c4314495" path="/var/lib/kubelet/pods/30d956ed-b602-4760-a856-6e47c4314495/volumes" Dec 02 19:03:43 crc kubenswrapper[4792]: I1202 19:03:43.859855 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-khxqd" Dec 02 19:03:43 crc kubenswrapper[4792]: I1202 19:03:43.860697 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-khxqd" Dec 02 19:03:43 crc kubenswrapper[4792]: I1202 19:03:43.949754 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-khxqd" Dec 02 19:03:44 crc kubenswrapper[4792]: I1202 19:03:44.654102 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-khxqd" Dec 02 19:03:45 crc kubenswrapper[4792]: I1202 19:03:45.100130 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-khxqd"] Dec 02 19:03:46 crc kubenswrapper[4792]: I1202 19:03:46.603771 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-khxqd" podUID="9cbbf6bf-22e8-49ab-affd-8921f0d8c60a" containerName="registry-server" containerID="cri-o://0a61e2a1d41b5e866b8669ac371a10cfbbddb9cb205fa149ecba2ec41babeaaf" gracePeriod=2 Dec 02 19:03:47 crc kubenswrapper[4792]: I1202 19:03:47.215832 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-khxqd" Dec 02 19:03:47 crc kubenswrapper[4792]: I1202 19:03:47.294042 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zfq87\" (UniqueName: \"kubernetes.io/projected/9cbbf6bf-22e8-49ab-affd-8921f0d8c60a-kube-api-access-zfq87\") pod \"9cbbf6bf-22e8-49ab-affd-8921f0d8c60a\" (UID: \"9cbbf6bf-22e8-49ab-affd-8921f0d8c60a\") " Dec 02 19:03:47 crc kubenswrapper[4792]: I1202 19:03:47.294212 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cbbf6bf-22e8-49ab-affd-8921f0d8c60a-utilities\") pod \"9cbbf6bf-22e8-49ab-affd-8921f0d8c60a\" (UID: \"9cbbf6bf-22e8-49ab-affd-8921f0d8c60a\") " Dec 02 19:03:47 crc kubenswrapper[4792]: I1202 19:03:47.294269 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cbbf6bf-22e8-49ab-affd-8921f0d8c60a-catalog-content\") pod \"9cbbf6bf-22e8-49ab-affd-8921f0d8c60a\" (UID: \"9cbbf6bf-22e8-49ab-affd-8921f0d8c60a\") " Dec 02 19:03:47 crc kubenswrapper[4792]: I1202 19:03:47.301952 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9cbbf6bf-22e8-49ab-affd-8921f0d8c60a-kube-api-access-zfq87" (OuterVolumeSpecName: "kube-api-access-zfq87") pod "9cbbf6bf-22e8-49ab-affd-8921f0d8c60a" (UID: "9cbbf6bf-22e8-49ab-affd-8921f0d8c60a"). InnerVolumeSpecName "kube-api-access-zfq87". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:03:47 crc kubenswrapper[4792]: I1202 19:03:47.312551 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9cbbf6bf-22e8-49ab-affd-8921f0d8c60a-utilities" (OuterVolumeSpecName: "utilities") pod "9cbbf6bf-22e8-49ab-affd-8921f0d8c60a" (UID: "9cbbf6bf-22e8-49ab-affd-8921f0d8c60a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:03:47 crc kubenswrapper[4792]: I1202 19:03:47.325482 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9cbbf6bf-22e8-49ab-affd-8921f0d8c60a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9cbbf6bf-22e8-49ab-affd-8921f0d8c60a" (UID: "9cbbf6bf-22e8-49ab-affd-8921f0d8c60a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:03:47 crc kubenswrapper[4792]: I1202 19:03:47.397545 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cbbf6bf-22e8-49ab-affd-8921f0d8c60a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 19:03:47 crc kubenswrapper[4792]: I1202 19:03:47.397578 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zfq87\" (UniqueName: \"kubernetes.io/projected/9cbbf6bf-22e8-49ab-affd-8921f0d8c60a-kube-api-access-zfq87\") on node \"crc\" DevicePath \"\"" Dec 02 19:03:47 crc kubenswrapper[4792]: I1202 19:03:47.397590 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cbbf6bf-22e8-49ab-affd-8921f0d8c60a-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 19:03:47 crc kubenswrapper[4792]: I1202 19:03:47.622482 4792 generic.go:334] "Generic (PLEG): container finished" podID="9cbbf6bf-22e8-49ab-affd-8921f0d8c60a" containerID="0a61e2a1d41b5e866b8669ac371a10cfbbddb9cb205fa149ecba2ec41babeaaf" exitCode=0 Dec 02 19:03:47 crc kubenswrapper[4792]: I1202 19:03:47.622581 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-khxqd" event={"ID":"9cbbf6bf-22e8-49ab-affd-8921f0d8c60a","Type":"ContainerDied","Data":"0a61e2a1d41b5e866b8669ac371a10cfbbddb9cb205fa149ecba2ec41babeaaf"} Dec 02 19:03:47 crc kubenswrapper[4792]: I1202 19:03:47.622654 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-khxqd" event={"ID":"9cbbf6bf-22e8-49ab-affd-8921f0d8c60a","Type":"ContainerDied","Data":"e3c98f3afee29a2e264def2e9fc1ee1c6687ae2771f64ef5a526ebbc21b1f265"} Dec 02 19:03:47 crc kubenswrapper[4792]: I1202 19:03:47.622683 4792 scope.go:117] "RemoveContainer" containerID="0a61e2a1d41b5e866b8669ac371a10cfbbddb9cb205fa149ecba2ec41babeaaf" Dec 02 19:03:47 crc kubenswrapper[4792]: I1202 19:03:47.622607 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-khxqd" Dec 02 19:03:47 crc kubenswrapper[4792]: I1202 19:03:47.672161 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-khxqd"] Dec 02 19:03:47 crc kubenswrapper[4792]: I1202 19:03:47.681553 4792 scope.go:117] "RemoveContainer" containerID="12cf2cf07423dbec867d3ef98d472fffaffb0f61f047e9d98d7588efc65d1b89" Dec 02 19:03:47 crc kubenswrapper[4792]: I1202 19:03:47.692200 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-khxqd"] Dec 02 19:03:47 crc kubenswrapper[4792]: I1202 19:03:47.749305 4792 scope.go:117] "RemoveContainer" containerID="90d4c810d2cdb9090702d67d6a32562422a9d400118ce8669e7f10d5ae9675ae" Dec 02 19:03:47 crc kubenswrapper[4792]: I1202 19:03:47.797592 4792 scope.go:117] "RemoveContainer" containerID="0a61e2a1d41b5e866b8669ac371a10cfbbddb9cb205fa149ecba2ec41babeaaf" Dec 02 19:03:47 crc kubenswrapper[4792]: E1202 19:03:47.798134 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0a61e2a1d41b5e866b8669ac371a10cfbbddb9cb205fa149ecba2ec41babeaaf\": container with ID starting with 0a61e2a1d41b5e866b8669ac371a10cfbbddb9cb205fa149ecba2ec41babeaaf not found: ID does not exist" containerID="0a61e2a1d41b5e866b8669ac371a10cfbbddb9cb205fa149ecba2ec41babeaaf" Dec 02 19:03:47 crc kubenswrapper[4792]: I1202 19:03:47.798184 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a61e2a1d41b5e866b8669ac371a10cfbbddb9cb205fa149ecba2ec41babeaaf"} err="failed to get container status \"0a61e2a1d41b5e866b8669ac371a10cfbbddb9cb205fa149ecba2ec41babeaaf\": rpc error: code = NotFound desc = could not find container \"0a61e2a1d41b5e866b8669ac371a10cfbbddb9cb205fa149ecba2ec41babeaaf\": container with ID starting with 0a61e2a1d41b5e866b8669ac371a10cfbbddb9cb205fa149ecba2ec41babeaaf not found: ID does not exist" Dec 02 19:03:47 crc kubenswrapper[4792]: I1202 19:03:47.798219 4792 scope.go:117] "RemoveContainer" containerID="12cf2cf07423dbec867d3ef98d472fffaffb0f61f047e9d98d7588efc65d1b89" Dec 02 19:03:47 crc kubenswrapper[4792]: E1202 19:03:47.798561 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"12cf2cf07423dbec867d3ef98d472fffaffb0f61f047e9d98d7588efc65d1b89\": container with ID starting with 12cf2cf07423dbec867d3ef98d472fffaffb0f61f047e9d98d7588efc65d1b89 not found: ID does not exist" containerID="12cf2cf07423dbec867d3ef98d472fffaffb0f61f047e9d98d7588efc65d1b89" Dec 02 19:03:47 crc kubenswrapper[4792]: I1202 19:03:47.798626 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"12cf2cf07423dbec867d3ef98d472fffaffb0f61f047e9d98d7588efc65d1b89"} err="failed to get container status \"12cf2cf07423dbec867d3ef98d472fffaffb0f61f047e9d98d7588efc65d1b89\": rpc error: code = NotFound desc = could not find container \"12cf2cf07423dbec867d3ef98d472fffaffb0f61f047e9d98d7588efc65d1b89\": container with ID starting with 12cf2cf07423dbec867d3ef98d472fffaffb0f61f047e9d98d7588efc65d1b89 not found: ID does not exist" Dec 02 19:03:47 crc kubenswrapper[4792]: I1202 19:03:47.798694 4792 scope.go:117] "RemoveContainer" containerID="90d4c810d2cdb9090702d67d6a32562422a9d400118ce8669e7f10d5ae9675ae" Dec 02 19:03:47 crc kubenswrapper[4792]: E1202 19:03:47.800309 4792 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"90d4c810d2cdb9090702d67d6a32562422a9d400118ce8669e7f10d5ae9675ae\": container with ID starting with 90d4c810d2cdb9090702d67d6a32562422a9d400118ce8669e7f10d5ae9675ae not found: ID does not exist" containerID="90d4c810d2cdb9090702d67d6a32562422a9d400118ce8669e7f10d5ae9675ae" Dec 02 19:03:47 crc kubenswrapper[4792]: I1202 19:03:47.800355 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90d4c810d2cdb9090702d67d6a32562422a9d400118ce8669e7f10d5ae9675ae"} err="failed to get container status \"90d4c810d2cdb9090702d67d6a32562422a9d400118ce8669e7f10d5ae9675ae\": rpc error: code = NotFound desc = could not find container \"90d4c810d2cdb9090702d67d6a32562422a9d400118ce8669e7f10d5ae9675ae\": container with ID starting with 90d4c810d2cdb9090702d67d6a32562422a9d400118ce8669e7f10d5ae9675ae not found: ID does not exist" Dec 02 19:03:49 crc kubenswrapper[4792]: I1202 19:03:49.562830 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9cbbf6bf-22e8-49ab-affd-8921f0d8c60a" path="/var/lib/kubelet/pods/9cbbf6bf-22e8-49ab-affd-8921f0d8c60a/volumes" Dec 02 19:03:50 crc kubenswrapper[4792]: I1202 19:03:50.539849 4792 scope.go:117] "RemoveContainer" containerID="1bd3680d8cabbe09313bc8e9bfcfc7ae4c8aef120bcf8851a5536ed5b3144523" Dec 02 19:03:50 crc kubenswrapper[4792]: E1202 19:03:50.540167 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:04:01 crc kubenswrapper[4792]: I1202 19:04:01.540171 4792 scope.go:117] "RemoveContainer" containerID="1bd3680d8cabbe09313bc8e9bfcfc7ae4c8aef120bcf8851a5536ed5b3144523" Dec 02 19:04:01 crc kubenswrapper[4792]: E1202 19:04:01.541055 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:04:14 crc kubenswrapper[4792]: I1202 19:04:14.540298 4792 scope.go:117] "RemoveContainer" containerID="1bd3680d8cabbe09313bc8e9bfcfc7ae4c8aef120bcf8851a5536ed5b3144523" Dec 02 19:04:14 crc kubenswrapper[4792]: E1202 19:04:14.541750 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:04:26 crc kubenswrapper[4792]: I1202 19:04:26.540858 4792 scope.go:117] "RemoveContainer" containerID="1bd3680d8cabbe09313bc8e9bfcfc7ae4c8aef120bcf8851a5536ed5b3144523" Dec 02 19:04:26 crc kubenswrapper[4792]: E1202 19:04:26.542014 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:04:34 crc kubenswrapper[4792]: I1202 19:04:34.236873 4792 scope.go:117] "RemoveContainer" containerID="878a6446d1a442e6e2bca3c1e5afff1787a2da6fa21d22591aa8c78d44356b7f" Dec 02 19:04:40 crc kubenswrapper[4792]: I1202 19:04:40.540757 4792 scope.go:117] "RemoveContainer" containerID="1bd3680d8cabbe09313bc8e9bfcfc7ae4c8aef120bcf8851a5536ed5b3144523" Dec 02 19:04:40 crc kubenswrapper[4792]: E1202 19:04:40.542068 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:04:52 crc kubenswrapper[4792]: I1202 19:04:52.569466 4792 scope.go:117] "RemoveContainer" containerID="1bd3680d8cabbe09313bc8e9bfcfc7ae4c8aef120bcf8851a5536ed5b3144523" Dec 02 19:04:52 crc kubenswrapper[4792]: E1202 19:04:52.570646 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:05:04 crc kubenswrapper[4792]: I1202 19:05:04.540540 4792 scope.go:117] "RemoveContainer" containerID="1bd3680d8cabbe09313bc8e9bfcfc7ae4c8aef120bcf8851a5536ed5b3144523" Dec 02 19:05:04 crc kubenswrapper[4792]: E1202 19:05:04.541332 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:05:16 crc kubenswrapper[4792]: I1202 19:05:16.540024 4792 scope.go:117] "RemoveContainer" containerID="1bd3680d8cabbe09313bc8e9bfcfc7ae4c8aef120bcf8851a5536ed5b3144523" Dec 02 19:05:16 crc kubenswrapper[4792]: E1202 19:05:16.541098 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:05:29 crc kubenswrapper[4792]: I1202 19:05:29.561025 4792 scope.go:117] "RemoveContainer" containerID="1bd3680d8cabbe09313bc8e9bfcfc7ae4c8aef120bcf8851a5536ed5b3144523" Dec 02 19:05:29 crc kubenswrapper[4792]: E1202 19:05:29.562968 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:05:34 crc kubenswrapper[4792]: I1202 19:05:34.357291 4792 scope.go:117] "RemoveContainer" containerID="4549abf865afee3fd2aee942b4a1195a222f2f47a9607b87926af8e8f824dd3d" Dec 02 19:05:34 crc kubenswrapper[4792]: I1202 19:05:34.399605 4792 scope.go:117] "RemoveContainer" containerID="43670588f68762a39538e2ac8c4445d578cff5ff796b43ec83ec7fcb3c0445c2" Dec 02 19:05:41 crc kubenswrapper[4792]: I1202 19:05:41.590965 4792 scope.go:117] "RemoveContainer" containerID="1bd3680d8cabbe09313bc8e9bfcfc7ae4c8aef120bcf8851a5536ed5b3144523" Dec 02 19:05:41 crc kubenswrapper[4792]: E1202 19:05:41.591826 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:05:54 crc kubenswrapper[4792]: I1202 19:05:54.539830 4792 scope.go:117] "RemoveContainer" containerID="1bd3680d8cabbe09313bc8e9bfcfc7ae4c8aef120bcf8851a5536ed5b3144523" Dec 02 19:05:54 crc kubenswrapper[4792]: E1202 19:05:54.540871 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:06:09 crc kubenswrapper[4792]: I1202 19:06:09.560129 4792 scope.go:117] "RemoveContainer" containerID="1bd3680d8cabbe09313bc8e9bfcfc7ae4c8aef120bcf8851a5536ed5b3144523" Dec 02 19:06:09 crc kubenswrapper[4792]: E1202 19:06:09.561078 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:06:21 crc kubenswrapper[4792]: I1202 19:06:21.480433 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-4p8j9"] Dec 02 19:06:21 crc kubenswrapper[4792]: E1202 19:06:21.481751 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30d956ed-b602-4760-a856-6e47c4314495" containerName="extract-utilities" Dec 02 19:06:21 crc kubenswrapper[4792]: I1202 19:06:21.481770 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="30d956ed-b602-4760-a856-6e47c4314495" containerName="extract-utilities" Dec 02 19:06:21 crc kubenswrapper[4792]: E1202 19:06:21.481788 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cbbf6bf-22e8-49ab-affd-8921f0d8c60a" containerName="extract-utilities" Dec 02 19:06:21 crc kubenswrapper[4792]: I1202 19:06:21.481796 
4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cbbf6bf-22e8-49ab-affd-8921f0d8c60a" containerName="extract-utilities" Dec 02 19:06:21 crc kubenswrapper[4792]: E1202 19:06:21.481815 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cbbf6bf-22e8-49ab-affd-8921f0d8c60a" containerName="extract-content" Dec 02 19:06:21 crc kubenswrapper[4792]: I1202 19:06:21.481825 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cbbf6bf-22e8-49ab-affd-8921f0d8c60a" containerName="extract-content" Dec 02 19:06:21 crc kubenswrapper[4792]: E1202 19:06:21.481855 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cbbf6bf-22e8-49ab-affd-8921f0d8c60a" containerName="registry-server" Dec 02 19:06:21 crc kubenswrapper[4792]: I1202 19:06:21.481865 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cbbf6bf-22e8-49ab-affd-8921f0d8c60a" containerName="registry-server" Dec 02 19:06:21 crc kubenswrapper[4792]: E1202 19:06:21.481887 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30d956ed-b602-4760-a856-6e47c4314495" containerName="extract-content" Dec 02 19:06:21 crc kubenswrapper[4792]: I1202 19:06:21.481898 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="30d956ed-b602-4760-a856-6e47c4314495" containerName="extract-content" Dec 02 19:06:21 crc kubenswrapper[4792]: E1202 19:06:21.481916 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30d956ed-b602-4760-a856-6e47c4314495" containerName="registry-server" Dec 02 19:06:21 crc kubenswrapper[4792]: I1202 19:06:21.481924 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="30d956ed-b602-4760-a856-6e47c4314495" containerName="registry-server" Dec 02 19:06:21 crc kubenswrapper[4792]: I1202 19:06:21.482205 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="9cbbf6bf-22e8-49ab-affd-8921f0d8c60a" containerName="registry-server" Dec 02 19:06:21 crc kubenswrapper[4792]: I1202 19:06:21.482255 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="30d956ed-b602-4760-a856-6e47c4314495" containerName="registry-server" Dec 02 19:06:21 crc kubenswrapper[4792]: I1202 19:06:21.484381 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4p8j9" Dec 02 19:06:21 crc kubenswrapper[4792]: I1202 19:06:21.502811 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4p8j9"] Dec 02 19:06:21 crc kubenswrapper[4792]: I1202 19:06:21.540888 4792 scope.go:117] "RemoveContainer" containerID="1bd3680d8cabbe09313bc8e9bfcfc7ae4c8aef120bcf8851a5536ed5b3144523" Dec 02 19:06:21 crc kubenswrapper[4792]: E1202 19:06:21.541628 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:06:21 crc kubenswrapper[4792]: I1202 19:06:21.552387 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09aad814-ab17-41ad-9489-027379a3479e-utilities\") pod \"community-operators-4p8j9\" (UID: \"09aad814-ab17-41ad-9489-027379a3479e\") " pod="openshift-marketplace/community-operators-4p8j9" Dec 02 19:06:21 crc kubenswrapper[4792]: I1202 19:06:21.552550 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxwzr\" (UniqueName: \"kubernetes.io/projected/09aad814-ab17-41ad-9489-027379a3479e-kube-api-access-wxwzr\") pod \"community-operators-4p8j9\" (UID: \"09aad814-ab17-41ad-9489-027379a3479e\") " pod="openshift-marketplace/community-operators-4p8j9" Dec 02 19:06:21 crc kubenswrapper[4792]: I1202 19:06:21.552683 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09aad814-ab17-41ad-9489-027379a3479e-catalog-content\") pod \"community-operators-4p8j9\" (UID: \"09aad814-ab17-41ad-9489-027379a3479e\") " pod="openshift-marketplace/community-operators-4p8j9" Dec 02 19:06:21 crc kubenswrapper[4792]: I1202 19:06:21.655199 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09aad814-ab17-41ad-9489-027379a3479e-utilities\") pod \"community-operators-4p8j9\" (UID: \"09aad814-ab17-41ad-9489-027379a3479e\") " pod="openshift-marketplace/community-operators-4p8j9" Dec 02 19:06:21 crc kubenswrapper[4792]: I1202 19:06:21.655346 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxwzr\" (UniqueName: \"kubernetes.io/projected/09aad814-ab17-41ad-9489-027379a3479e-kube-api-access-wxwzr\") pod \"community-operators-4p8j9\" (UID: \"09aad814-ab17-41ad-9489-027379a3479e\") " pod="openshift-marketplace/community-operators-4p8j9" Dec 02 19:06:21 crc kubenswrapper[4792]: I1202 19:06:21.655487 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09aad814-ab17-41ad-9489-027379a3479e-catalog-content\") pod \"community-operators-4p8j9\" (UID: \"09aad814-ab17-41ad-9489-027379a3479e\") " pod="openshift-marketplace/community-operators-4p8j9" Dec 02 19:06:21 crc kubenswrapper[4792]: I1202 19:06:21.655899 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/09aad814-ab17-41ad-9489-027379a3479e-utilities\") pod \"community-operators-4p8j9\" (UID: \"09aad814-ab17-41ad-9489-027379a3479e\") " pod="openshift-marketplace/community-operators-4p8j9" Dec 02 19:06:21 crc kubenswrapper[4792]: I1202 19:06:21.656322 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09aad814-ab17-41ad-9489-027379a3479e-catalog-content\") pod \"community-operators-4p8j9\" (UID: \"09aad814-ab17-41ad-9489-027379a3479e\") " pod="openshift-marketplace/community-operators-4p8j9" Dec 02 19:06:21 crc kubenswrapper[4792]: I1202 19:06:21.664230 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-mvsjf"] Dec 02 19:06:21 crc kubenswrapper[4792]: I1202 19:06:21.668225 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mvsjf" Dec 02 19:06:21 crc kubenswrapper[4792]: I1202 19:06:21.691432 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mvsjf"] Dec 02 19:06:21 crc kubenswrapper[4792]: I1202 19:06:21.701217 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxwzr\" (UniqueName: \"kubernetes.io/projected/09aad814-ab17-41ad-9489-027379a3479e-kube-api-access-wxwzr\") pod \"community-operators-4p8j9\" (UID: \"09aad814-ab17-41ad-9489-027379a3479e\") " pod="openshift-marketplace/community-operators-4p8j9" Dec 02 19:06:21 crc kubenswrapper[4792]: I1202 19:06:21.757858 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56e68866-5666-43fb-996f-81d75af7ca3e-catalog-content\") pod \"redhat-operators-mvsjf\" (UID: \"56e68866-5666-43fb-996f-81d75af7ca3e\") " pod="openshift-marketplace/redhat-operators-mvsjf" Dec 02 19:06:21 crc kubenswrapper[4792]: I1202 19:06:21.758059 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tsc7r\" (UniqueName: \"kubernetes.io/projected/56e68866-5666-43fb-996f-81d75af7ca3e-kube-api-access-tsc7r\") pod \"redhat-operators-mvsjf\" (UID: \"56e68866-5666-43fb-996f-81d75af7ca3e\") " pod="openshift-marketplace/redhat-operators-mvsjf" Dec 02 19:06:21 crc kubenswrapper[4792]: I1202 19:06:21.758135 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56e68866-5666-43fb-996f-81d75af7ca3e-utilities\") pod \"redhat-operators-mvsjf\" (UID: \"56e68866-5666-43fb-996f-81d75af7ca3e\") " pod="openshift-marketplace/redhat-operators-mvsjf" Dec 02 19:06:21 crc kubenswrapper[4792]: I1202 19:06:21.837878 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4p8j9" Dec 02 19:06:21 crc kubenswrapper[4792]: I1202 19:06:21.860316 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56e68866-5666-43fb-996f-81d75af7ca3e-catalog-content\") pod \"redhat-operators-mvsjf\" (UID: \"56e68866-5666-43fb-996f-81d75af7ca3e\") " pod="openshift-marketplace/redhat-operators-mvsjf" Dec 02 19:06:21 crc kubenswrapper[4792]: I1202 19:06:21.860476 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tsc7r\" (UniqueName: \"kubernetes.io/projected/56e68866-5666-43fb-996f-81d75af7ca3e-kube-api-access-tsc7r\") pod \"redhat-operators-mvsjf\" (UID: \"56e68866-5666-43fb-996f-81d75af7ca3e\") " pod="openshift-marketplace/redhat-operators-mvsjf" Dec 02 19:06:21 crc kubenswrapper[4792]: I1202 19:06:21.860512 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56e68866-5666-43fb-996f-81d75af7ca3e-utilities\") pod \"redhat-operators-mvsjf\" (UID: \"56e68866-5666-43fb-996f-81d75af7ca3e\") " pod="openshift-marketplace/redhat-operators-mvsjf" Dec 02 19:06:21 crc kubenswrapper[4792]: I1202 19:06:21.860750 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56e68866-5666-43fb-996f-81d75af7ca3e-catalog-content\") pod \"redhat-operators-mvsjf\" (UID: \"56e68866-5666-43fb-996f-81d75af7ca3e\") " pod="openshift-marketplace/redhat-operators-mvsjf" Dec 02 19:06:21 crc kubenswrapper[4792]: I1202 19:06:21.860774 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56e68866-5666-43fb-996f-81d75af7ca3e-utilities\") pod \"redhat-operators-mvsjf\" (UID: \"56e68866-5666-43fb-996f-81d75af7ca3e\") " pod="openshift-marketplace/redhat-operators-mvsjf" Dec 02 19:06:21 crc kubenswrapper[4792]: I1202 19:06:21.890402 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tsc7r\" (UniqueName: \"kubernetes.io/projected/56e68866-5666-43fb-996f-81d75af7ca3e-kube-api-access-tsc7r\") pod \"redhat-operators-mvsjf\" (UID: \"56e68866-5666-43fb-996f-81d75af7ca3e\") " pod="openshift-marketplace/redhat-operators-mvsjf" Dec 02 19:06:21 crc kubenswrapper[4792]: I1202 19:06:21.996760 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mvsjf" Dec 02 19:06:22 crc kubenswrapper[4792]: I1202 19:06:22.332488 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4p8j9"] Dec 02 19:06:22 crc kubenswrapper[4792]: W1202 19:06:22.344354 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod09aad814_ab17_41ad_9489_027379a3479e.slice/crio-2fef98be26c796d8618617e0bed3c9a1ec21703ed7e1402b74a218ed5ea1f00e WatchSource:0}: Error finding container 2fef98be26c796d8618617e0bed3c9a1ec21703ed7e1402b74a218ed5ea1f00e: Status 404 returned error can't find the container with id 2fef98be26c796d8618617e0bed3c9a1ec21703ed7e1402b74a218ed5ea1f00e Dec 02 19:06:22 crc kubenswrapper[4792]: W1202 19:06:22.503890 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod56e68866_5666_43fb_996f_81d75af7ca3e.slice/crio-54a1aa0017c299c61cc6ff587681141bbad4571350d0711e0405711ef8692871 WatchSource:0}: Error finding container 54a1aa0017c299c61cc6ff587681141bbad4571350d0711e0405711ef8692871: Status 404 returned error can't find the container with id 54a1aa0017c299c61cc6ff587681141bbad4571350d0711e0405711ef8692871 Dec 02 19:06:22 crc kubenswrapper[4792]: I1202 19:06:22.504206 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mvsjf"] Dec 02 19:06:22 crc kubenswrapper[4792]: I1202 19:06:22.713676 4792 generic.go:334] "Generic (PLEG): container finished" podID="09aad814-ab17-41ad-9489-027379a3479e" containerID="f84bef9c8b5650dc3691707095cbd079291e9eb0ff07a617f6fe16f14d6657cc" exitCode=0 Dec 02 19:06:22 crc kubenswrapper[4792]: I1202 19:06:22.713772 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4p8j9" event={"ID":"09aad814-ab17-41ad-9489-027379a3479e","Type":"ContainerDied","Data":"f84bef9c8b5650dc3691707095cbd079291e9eb0ff07a617f6fe16f14d6657cc"} Dec 02 19:06:22 crc kubenswrapper[4792]: I1202 19:06:22.714029 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4p8j9" event={"ID":"09aad814-ab17-41ad-9489-027379a3479e","Type":"ContainerStarted","Data":"2fef98be26c796d8618617e0bed3c9a1ec21703ed7e1402b74a218ed5ea1f00e"} Dec 02 19:06:22 crc kubenswrapper[4792]: I1202 19:06:22.717633 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mvsjf" event={"ID":"56e68866-5666-43fb-996f-81d75af7ca3e","Type":"ContainerStarted","Data":"54a1aa0017c299c61cc6ff587681141bbad4571350d0711e0405711ef8692871"} Dec 02 19:06:23 crc kubenswrapper[4792]: I1202 19:06:23.762128 4792 generic.go:334] "Generic (PLEG): container finished" podID="56e68866-5666-43fb-996f-81d75af7ca3e" containerID="491f4660400768e7dff990a7d7b55b50ccf0ec508a7c6080b2db22393e20ea41" exitCode=0 Dec 02 19:06:23 crc kubenswrapper[4792]: I1202 19:06:23.762216 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mvsjf" event={"ID":"56e68866-5666-43fb-996f-81d75af7ca3e","Type":"ContainerDied","Data":"491f4660400768e7dff990a7d7b55b50ccf0ec508a7c6080b2db22393e20ea41"} Dec 02 19:06:24 crc kubenswrapper[4792]: I1202 19:06:24.779769 4792 generic.go:334] "Generic (PLEG): container finished" podID="09aad814-ab17-41ad-9489-027379a3479e" containerID="0fd72ba58d4c9d687ee081685fa7e10120b3e2301f926b297c1e6fae444ff948" exitCode=0 Dec 
02 19:06:24 crc kubenswrapper[4792]: I1202 19:06:24.779844 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4p8j9" event={"ID":"09aad814-ab17-41ad-9489-027379a3479e","Type":"ContainerDied","Data":"0fd72ba58d4c9d687ee081685fa7e10120b3e2301f926b297c1e6fae444ff948"} Dec 02 19:06:25 crc kubenswrapper[4792]: I1202 19:06:25.798222 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mvsjf" event={"ID":"56e68866-5666-43fb-996f-81d75af7ca3e","Type":"ContainerStarted","Data":"eb21e730cac5b3186a8d8e23e6c401cde044cbe2ba3515cf8dcd58daffcd7521"} Dec 02 19:06:25 crc kubenswrapper[4792]: I1202 19:06:25.801682 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4p8j9" event={"ID":"09aad814-ab17-41ad-9489-027379a3479e","Type":"ContainerStarted","Data":"e9c48f9b2d804ae6c45f9ca94ab7274b215eb983121df0cef7b2d1391a4ec3ef"} Dec 02 19:06:26 crc kubenswrapper[4792]: I1202 19:06:26.847409 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-4p8j9" podStartSLOduration=3.364912725 podStartE2EDuration="5.847384875s" podCreationTimestamp="2025-12-02 19:06:21 +0000 UTC" firstStartedPulling="2025-12-02 19:06:22.715495197 +0000 UTC m=+1813.488387535" lastFinishedPulling="2025-12-02 19:06:25.197967357 +0000 UTC m=+1815.970859685" observedRunningTime="2025-12-02 19:06:26.836344616 +0000 UTC m=+1817.609236984" watchObservedRunningTime="2025-12-02 19:06:26.847384875 +0000 UTC m=+1817.620277233" Dec 02 19:06:30 crc kubenswrapper[4792]: I1202 19:06:30.861931 4792 generic.go:334] "Generic (PLEG): container finished" podID="56e68866-5666-43fb-996f-81d75af7ca3e" containerID="eb21e730cac5b3186a8d8e23e6c401cde044cbe2ba3515cf8dcd58daffcd7521" exitCode=0 Dec 02 19:06:30 crc kubenswrapper[4792]: I1202 19:06:30.862023 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mvsjf" event={"ID":"56e68866-5666-43fb-996f-81d75af7ca3e","Type":"ContainerDied","Data":"eb21e730cac5b3186a8d8e23e6c401cde044cbe2ba3515cf8dcd58daffcd7521"} Dec 02 19:06:31 crc kubenswrapper[4792]: I1202 19:06:31.838571 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-4p8j9" Dec 02 19:06:31 crc kubenswrapper[4792]: I1202 19:06:31.839164 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-4p8j9" Dec 02 19:06:31 crc kubenswrapper[4792]: I1202 19:06:31.879556 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mvsjf" event={"ID":"56e68866-5666-43fb-996f-81d75af7ca3e","Type":"ContainerStarted","Data":"01b53ee7d4e47de555fa0f6b4211cabdaded761b49044ccf8504016930dcba42"} Dec 02 19:06:31 crc kubenswrapper[4792]: I1202 19:06:31.884642 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-4p8j9" Dec 02 19:06:31 crc kubenswrapper[4792]: I1202 19:06:31.908563 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-mvsjf" podStartSLOduration=3.338733241 podStartE2EDuration="10.908549158s" podCreationTimestamp="2025-12-02 19:06:21 +0000 UTC" firstStartedPulling="2025-12-02 19:06:23.764064869 +0000 UTC m=+1814.536957207" lastFinishedPulling="2025-12-02 19:06:31.333880806 +0000 UTC m=+1822.106773124" 
observedRunningTime="2025-12-02 19:06:31.902403737 +0000 UTC m=+1822.675296055" watchObservedRunningTime="2025-12-02 19:06:31.908549158 +0000 UTC m=+1822.681441486" Dec 02 19:06:31 crc kubenswrapper[4792]: I1202 19:06:31.941680 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-4p8j9" Dec 02 19:06:31 crc kubenswrapper[4792]: I1202 19:06:31.997706 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-mvsjf" Dec 02 19:06:31 crc kubenswrapper[4792]: I1202 19:06:31.997755 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-mvsjf" Dec 02 19:06:32 crc kubenswrapper[4792]: I1202 19:06:32.540330 4792 scope.go:117] "RemoveContainer" containerID="1bd3680d8cabbe09313bc8e9bfcfc7ae4c8aef120bcf8851a5536ed5b3144523" Dec 02 19:06:32 crc kubenswrapper[4792]: E1202 19:06:32.541624 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:06:33 crc kubenswrapper[4792]: I1202 19:06:33.040428 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-mvsjf" podUID="56e68866-5666-43fb-996f-81d75af7ca3e" containerName="registry-server" probeResult="failure" output=< Dec 02 19:06:33 crc kubenswrapper[4792]: timeout: failed to connect service ":50051" within 1s Dec 02 19:06:33 crc kubenswrapper[4792]: > Dec 02 19:06:34 crc kubenswrapper[4792]: I1202 19:06:34.256483 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4p8j9"] Dec 02 19:06:34 crc kubenswrapper[4792]: I1202 19:06:34.257794 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-4p8j9" podUID="09aad814-ab17-41ad-9489-027379a3479e" containerName="registry-server" containerID="cri-o://e9c48f9b2d804ae6c45f9ca94ab7274b215eb983121df0cef7b2d1391a4ec3ef" gracePeriod=2 Dec 02 19:06:34 crc kubenswrapper[4792]: I1202 19:06:34.543265 4792 scope.go:117] "RemoveContainer" containerID="c033e5f4a3c35edec4a7596610fee866738e841d71ea312bd5fb797f51f7a2e0" Dec 02 19:06:34 crc kubenswrapper[4792]: I1202 19:06:34.570718 4792 scope.go:117] "RemoveContainer" containerID="cf37d2c6f1947ee99004a85750d08ab4604a9271fadad530a025b430987e144c" Dec 02 19:06:34 crc kubenswrapper[4792]: I1202 19:06:34.598771 4792 scope.go:117] "RemoveContainer" containerID="2c5bf2758abc6698c72fa1104bf586bd08191a488adcd5fe019494ff78497d20" Dec 02 19:06:34 crc kubenswrapper[4792]: I1202 19:06:34.622324 4792 scope.go:117] "RemoveContainer" containerID="db82f0bb76c64bdde4546c248e7fc8193594c10420a57d4c7770ec959341fb04" Dec 02 19:06:34 crc kubenswrapper[4792]: I1202 19:06:34.735441 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4p8j9" Dec 02 19:06:34 crc kubenswrapper[4792]: I1202 19:06:34.757247 4792 scope.go:117] "RemoveContainer" containerID="a3313d463969b31df8605a6d07db02a35a918d4eba57257fb921e8313dfbb57a" Dec 02 19:06:34 crc kubenswrapper[4792]: I1202 19:06:34.790339 4792 scope.go:117] "RemoveContainer" containerID="5a9b1582e15bb65792e7df9a181726aa3fb9f3f8acb6aff76c21e16bc6d385df" Dec 02 19:06:34 crc kubenswrapper[4792]: I1202 19:06:34.811196 4792 scope.go:117] "RemoveContainer" containerID="cb3e5581d1629ec5be006d82da763064d24779c5504e7d06f8e3ed2b51c7daf4" Dec 02 19:06:34 crc kubenswrapper[4792]: I1202 19:06:34.833480 4792 scope.go:117] "RemoveContainer" containerID="6aa9c6b275819640b44ddb3cfd4e9a349ad08dc5ae9af788b165d4f86c1fba09" Dec 02 19:06:34 crc kubenswrapper[4792]: I1202 19:06:34.853128 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxwzr\" (UniqueName: \"kubernetes.io/projected/09aad814-ab17-41ad-9489-027379a3479e-kube-api-access-wxwzr\") pod \"09aad814-ab17-41ad-9489-027379a3479e\" (UID: \"09aad814-ab17-41ad-9489-027379a3479e\") " Dec 02 19:06:34 crc kubenswrapper[4792]: I1202 19:06:34.854736 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09aad814-ab17-41ad-9489-027379a3479e-catalog-content\") pod \"09aad814-ab17-41ad-9489-027379a3479e\" (UID: \"09aad814-ab17-41ad-9489-027379a3479e\") " Dec 02 19:06:34 crc kubenswrapper[4792]: I1202 19:06:34.854807 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09aad814-ab17-41ad-9489-027379a3479e-utilities\") pod \"09aad814-ab17-41ad-9489-027379a3479e\" (UID: \"09aad814-ab17-41ad-9489-027379a3479e\") " Dec 02 19:06:34 crc kubenswrapper[4792]: I1202 19:06:34.855996 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09aad814-ab17-41ad-9489-027379a3479e-utilities" (OuterVolumeSpecName: "utilities") pod "09aad814-ab17-41ad-9489-027379a3479e" (UID: "09aad814-ab17-41ad-9489-027379a3479e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:06:34 crc kubenswrapper[4792]: I1202 19:06:34.858914 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09aad814-ab17-41ad-9489-027379a3479e-kube-api-access-wxwzr" (OuterVolumeSpecName: "kube-api-access-wxwzr") pod "09aad814-ab17-41ad-9489-027379a3479e" (UID: "09aad814-ab17-41ad-9489-027379a3479e"). InnerVolumeSpecName "kube-api-access-wxwzr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:06:34 crc kubenswrapper[4792]: I1202 19:06:34.866950 4792 scope.go:117] "RemoveContainer" containerID="4fe86ac2ba0fc0060139cb72993d04c5d714627d2770c04f8b1af3b984869601" Dec 02 19:06:34 crc kubenswrapper[4792]: I1202 19:06:34.918291 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09aad814-ab17-41ad-9489-027379a3479e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "09aad814-ab17-41ad-9489-027379a3479e" (UID: "09aad814-ab17-41ad-9489-027379a3479e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:06:34 crc kubenswrapper[4792]: I1202 19:06:34.918846 4792 generic.go:334] "Generic (PLEG): container finished" podID="09aad814-ab17-41ad-9489-027379a3479e" containerID="e9c48f9b2d804ae6c45f9ca94ab7274b215eb983121df0cef7b2d1391a4ec3ef" exitCode=0 Dec 02 19:06:34 crc kubenswrapper[4792]: I1202 19:06:34.918945 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4p8j9" Dec 02 19:06:34 crc kubenswrapper[4792]: I1202 19:06:34.918917 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4p8j9" event={"ID":"09aad814-ab17-41ad-9489-027379a3479e","Type":"ContainerDied","Data":"e9c48f9b2d804ae6c45f9ca94ab7274b215eb983121df0cef7b2d1391a4ec3ef"} Dec 02 19:06:34 crc kubenswrapper[4792]: I1202 19:06:34.919224 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4p8j9" event={"ID":"09aad814-ab17-41ad-9489-027379a3479e","Type":"ContainerDied","Data":"2fef98be26c796d8618617e0bed3c9a1ec21703ed7e1402b74a218ed5ea1f00e"} Dec 02 19:06:34 crc kubenswrapper[4792]: I1202 19:06:34.919282 4792 scope.go:117] "RemoveContainer" containerID="e9c48f9b2d804ae6c45f9ca94ab7274b215eb983121df0cef7b2d1391a4ec3ef" Dec 02 19:06:34 crc kubenswrapper[4792]: I1202 19:06:34.957977 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09aad814-ab17-41ad-9489-027379a3479e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 19:06:34 crc kubenswrapper[4792]: I1202 19:06:34.958006 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09aad814-ab17-41ad-9489-027379a3479e-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 19:06:34 crc kubenswrapper[4792]: I1202 19:06:34.958016 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxwzr\" (UniqueName: \"kubernetes.io/projected/09aad814-ab17-41ad-9489-027379a3479e-kube-api-access-wxwzr\") on node \"crc\" DevicePath \"\"" Dec 02 19:06:34 crc kubenswrapper[4792]: I1202 19:06:34.977263 4792 scope.go:117] "RemoveContainer" containerID="0fd72ba58d4c9d687ee081685fa7e10120b3e2301f926b297c1e6fae444ff948" Dec 02 19:06:34 crc kubenswrapper[4792]: I1202 19:06:34.986176 4792 scope.go:117] "RemoveContainer" containerID="7f40b662c374b2ba1ac05c64023ecd177f57b3fa1580eef0822bccac917d6fdf" Dec 02 19:06:34 crc kubenswrapper[4792]: I1202 19:06:34.993549 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4p8j9"] Dec 02 19:06:35 crc kubenswrapper[4792]: I1202 19:06:35.003264 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-4p8j9"] Dec 02 19:06:35 crc kubenswrapper[4792]: I1202 19:06:35.004639 4792 scope.go:117] "RemoveContainer" containerID="f84bef9c8b5650dc3691707095cbd079291e9eb0ff07a617f6fe16f14d6657cc" Dec 02 19:06:35 crc kubenswrapper[4792]: I1202 19:06:35.039221 4792 scope.go:117] "RemoveContainer" containerID="e9c48f9b2d804ae6c45f9ca94ab7274b215eb983121df0cef7b2d1391a4ec3ef" Dec 02 19:06:35 crc kubenswrapper[4792]: E1202 19:06:35.039669 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e9c48f9b2d804ae6c45f9ca94ab7274b215eb983121df0cef7b2d1391a4ec3ef\": container with ID starting with 
e9c48f9b2d804ae6c45f9ca94ab7274b215eb983121df0cef7b2d1391a4ec3ef not found: ID does not exist" containerID="e9c48f9b2d804ae6c45f9ca94ab7274b215eb983121df0cef7b2d1391a4ec3ef" Dec 02 19:06:35 crc kubenswrapper[4792]: I1202 19:06:35.039704 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9c48f9b2d804ae6c45f9ca94ab7274b215eb983121df0cef7b2d1391a4ec3ef"} err="failed to get container status \"e9c48f9b2d804ae6c45f9ca94ab7274b215eb983121df0cef7b2d1391a4ec3ef\": rpc error: code = NotFound desc = could not find container \"e9c48f9b2d804ae6c45f9ca94ab7274b215eb983121df0cef7b2d1391a4ec3ef\": container with ID starting with e9c48f9b2d804ae6c45f9ca94ab7274b215eb983121df0cef7b2d1391a4ec3ef not found: ID does not exist" Dec 02 19:06:35 crc kubenswrapper[4792]: I1202 19:06:35.039729 4792 scope.go:117] "RemoveContainer" containerID="0fd72ba58d4c9d687ee081685fa7e10120b3e2301f926b297c1e6fae444ff948" Dec 02 19:06:35 crc kubenswrapper[4792]: E1202 19:06:35.040141 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0fd72ba58d4c9d687ee081685fa7e10120b3e2301f926b297c1e6fae444ff948\": container with ID starting with 0fd72ba58d4c9d687ee081685fa7e10120b3e2301f926b297c1e6fae444ff948 not found: ID does not exist" containerID="0fd72ba58d4c9d687ee081685fa7e10120b3e2301f926b297c1e6fae444ff948" Dec 02 19:06:35 crc kubenswrapper[4792]: I1202 19:06:35.040171 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0fd72ba58d4c9d687ee081685fa7e10120b3e2301f926b297c1e6fae444ff948"} err="failed to get container status \"0fd72ba58d4c9d687ee081685fa7e10120b3e2301f926b297c1e6fae444ff948\": rpc error: code = NotFound desc = could not find container \"0fd72ba58d4c9d687ee081685fa7e10120b3e2301f926b297c1e6fae444ff948\": container with ID starting with 0fd72ba58d4c9d687ee081685fa7e10120b3e2301f926b297c1e6fae444ff948 not found: ID does not exist" Dec 02 19:06:35 crc kubenswrapper[4792]: I1202 19:06:35.040192 4792 scope.go:117] "RemoveContainer" containerID="f84bef9c8b5650dc3691707095cbd079291e9eb0ff07a617f6fe16f14d6657cc" Dec 02 19:06:35 crc kubenswrapper[4792]: E1202 19:06:35.040575 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f84bef9c8b5650dc3691707095cbd079291e9eb0ff07a617f6fe16f14d6657cc\": container with ID starting with f84bef9c8b5650dc3691707095cbd079291e9eb0ff07a617f6fe16f14d6657cc not found: ID does not exist" containerID="f84bef9c8b5650dc3691707095cbd079291e9eb0ff07a617f6fe16f14d6657cc" Dec 02 19:06:35 crc kubenswrapper[4792]: I1202 19:06:35.040626 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f84bef9c8b5650dc3691707095cbd079291e9eb0ff07a617f6fe16f14d6657cc"} err="failed to get container status \"f84bef9c8b5650dc3691707095cbd079291e9eb0ff07a617f6fe16f14d6657cc\": rpc error: code = NotFound desc = could not find container \"f84bef9c8b5650dc3691707095cbd079291e9eb0ff07a617f6fe16f14d6657cc\": container with ID starting with f84bef9c8b5650dc3691707095cbd079291e9eb0ff07a617f6fe16f14d6657cc not found: ID does not exist" Dec 02 19:06:35 crc kubenswrapper[4792]: I1202 19:06:35.552599 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09aad814-ab17-41ad-9489-027379a3479e" path="/var/lib/kubelet/pods/09aad814-ab17-41ad-9489-027379a3479e/volumes" Dec 02 19:06:42 crc kubenswrapper[4792]: I1202 19:06:42.068590 
4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-mvsjf" Dec 02 19:06:42 crc kubenswrapper[4792]: I1202 19:06:42.138021 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-mvsjf" Dec 02 19:06:42 crc kubenswrapper[4792]: I1202 19:06:42.308365 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mvsjf"] Dec 02 19:06:44 crc kubenswrapper[4792]: I1202 19:06:44.036672 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-mvsjf" podUID="56e68866-5666-43fb-996f-81d75af7ca3e" containerName="registry-server" containerID="cri-o://01b53ee7d4e47de555fa0f6b4211cabdaded761b49044ccf8504016930dcba42" gracePeriod=2 Dec 02 19:06:44 crc kubenswrapper[4792]: I1202 19:06:44.699394 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mvsjf" Dec 02 19:06:44 crc kubenswrapper[4792]: I1202 19:06:44.797333 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56e68866-5666-43fb-996f-81d75af7ca3e-catalog-content\") pod \"56e68866-5666-43fb-996f-81d75af7ca3e\" (UID: \"56e68866-5666-43fb-996f-81d75af7ca3e\") " Dec 02 19:06:44 crc kubenswrapper[4792]: I1202 19:06:44.797408 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tsc7r\" (UniqueName: \"kubernetes.io/projected/56e68866-5666-43fb-996f-81d75af7ca3e-kube-api-access-tsc7r\") pod \"56e68866-5666-43fb-996f-81d75af7ca3e\" (UID: \"56e68866-5666-43fb-996f-81d75af7ca3e\") " Dec 02 19:06:44 crc kubenswrapper[4792]: I1202 19:06:44.797468 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56e68866-5666-43fb-996f-81d75af7ca3e-utilities\") pod \"56e68866-5666-43fb-996f-81d75af7ca3e\" (UID: \"56e68866-5666-43fb-996f-81d75af7ca3e\") " Dec 02 19:06:44 crc kubenswrapper[4792]: I1202 19:06:44.798899 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56e68866-5666-43fb-996f-81d75af7ca3e-utilities" (OuterVolumeSpecName: "utilities") pod "56e68866-5666-43fb-996f-81d75af7ca3e" (UID: "56e68866-5666-43fb-996f-81d75af7ca3e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:06:44 crc kubenswrapper[4792]: I1202 19:06:44.804967 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56e68866-5666-43fb-996f-81d75af7ca3e-kube-api-access-tsc7r" (OuterVolumeSpecName: "kube-api-access-tsc7r") pod "56e68866-5666-43fb-996f-81d75af7ca3e" (UID: "56e68866-5666-43fb-996f-81d75af7ca3e"). InnerVolumeSpecName "kube-api-access-tsc7r". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:06:44 crc kubenswrapper[4792]: I1202 19:06:44.900278 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tsc7r\" (UniqueName: \"kubernetes.io/projected/56e68866-5666-43fb-996f-81d75af7ca3e-kube-api-access-tsc7r\") on node \"crc\" DevicePath \"\"" Dec 02 19:06:44 crc kubenswrapper[4792]: I1202 19:06:44.900309 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56e68866-5666-43fb-996f-81d75af7ca3e-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 19:06:44 crc kubenswrapper[4792]: I1202 19:06:44.915010 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56e68866-5666-43fb-996f-81d75af7ca3e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "56e68866-5666-43fb-996f-81d75af7ca3e" (UID: "56e68866-5666-43fb-996f-81d75af7ca3e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:06:45 crc kubenswrapper[4792]: I1202 19:06:45.002785 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56e68866-5666-43fb-996f-81d75af7ca3e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 19:06:45 crc kubenswrapper[4792]: I1202 19:06:45.050461 4792 generic.go:334] "Generic (PLEG): container finished" podID="56e68866-5666-43fb-996f-81d75af7ca3e" containerID="01b53ee7d4e47de555fa0f6b4211cabdaded761b49044ccf8504016930dcba42" exitCode=0 Dec 02 19:06:45 crc kubenswrapper[4792]: I1202 19:06:45.050515 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mvsjf" event={"ID":"56e68866-5666-43fb-996f-81d75af7ca3e","Type":"ContainerDied","Data":"01b53ee7d4e47de555fa0f6b4211cabdaded761b49044ccf8504016930dcba42"} Dec 02 19:06:45 crc kubenswrapper[4792]: I1202 19:06:45.050568 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mvsjf" event={"ID":"56e68866-5666-43fb-996f-81d75af7ca3e","Type":"ContainerDied","Data":"54a1aa0017c299c61cc6ff587681141bbad4571350d0711e0405711ef8692871"} Dec 02 19:06:45 crc kubenswrapper[4792]: I1202 19:06:45.050589 4792 scope.go:117] "RemoveContainer" containerID="01b53ee7d4e47de555fa0f6b4211cabdaded761b49044ccf8504016930dcba42" Dec 02 19:06:45 crc kubenswrapper[4792]: I1202 19:06:45.050618 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mvsjf" Dec 02 19:06:45 crc kubenswrapper[4792]: I1202 19:06:45.080685 4792 scope.go:117] "RemoveContainer" containerID="eb21e730cac5b3186a8d8e23e6c401cde044cbe2ba3515cf8dcd58daffcd7521" Dec 02 19:06:45 crc kubenswrapper[4792]: I1202 19:06:45.094701 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mvsjf"] Dec 02 19:06:45 crc kubenswrapper[4792]: I1202 19:06:45.104905 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-mvsjf"] Dec 02 19:06:45 crc kubenswrapper[4792]: I1202 19:06:45.120402 4792 scope.go:117] "RemoveContainer" containerID="491f4660400768e7dff990a7d7b55b50ccf0ec508a7c6080b2db22393e20ea41" Dec 02 19:06:45 crc kubenswrapper[4792]: I1202 19:06:45.169823 4792 scope.go:117] "RemoveContainer" containerID="01b53ee7d4e47de555fa0f6b4211cabdaded761b49044ccf8504016930dcba42" Dec 02 19:06:45 crc kubenswrapper[4792]: E1202 19:06:45.170377 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"01b53ee7d4e47de555fa0f6b4211cabdaded761b49044ccf8504016930dcba42\": container with ID starting with 01b53ee7d4e47de555fa0f6b4211cabdaded761b49044ccf8504016930dcba42 not found: ID does not exist" containerID="01b53ee7d4e47de555fa0f6b4211cabdaded761b49044ccf8504016930dcba42" Dec 02 19:06:45 crc kubenswrapper[4792]: I1202 19:06:45.170459 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"01b53ee7d4e47de555fa0f6b4211cabdaded761b49044ccf8504016930dcba42"} err="failed to get container status \"01b53ee7d4e47de555fa0f6b4211cabdaded761b49044ccf8504016930dcba42\": rpc error: code = NotFound desc = could not find container \"01b53ee7d4e47de555fa0f6b4211cabdaded761b49044ccf8504016930dcba42\": container with ID starting with 01b53ee7d4e47de555fa0f6b4211cabdaded761b49044ccf8504016930dcba42 not found: ID does not exist" Dec 02 19:06:45 crc kubenswrapper[4792]: I1202 19:06:45.170620 4792 scope.go:117] "RemoveContainer" containerID="eb21e730cac5b3186a8d8e23e6c401cde044cbe2ba3515cf8dcd58daffcd7521" Dec 02 19:06:45 crc kubenswrapper[4792]: E1202 19:06:45.171584 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb21e730cac5b3186a8d8e23e6c401cde044cbe2ba3515cf8dcd58daffcd7521\": container with ID starting with eb21e730cac5b3186a8d8e23e6c401cde044cbe2ba3515cf8dcd58daffcd7521 not found: ID does not exist" containerID="eb21e730cac5b3186a8d8e23e6c401cde044cbe2ba3515cf8dcd58daffcd7521" Dec 02 19:06:45 crc kubenswrapper[4792]: I1202 19:06:45.171654 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb21e730cac5b3186a8d8e23e6c401cde044cbe2ba3515cf8dcd58daffcd7521"} err="failed to get container status \"eb21e730cac5b3186a8d8e23e6c401cde044cbe2ba3515cf8dcd58daffcd7521\": rpc error: code = NotFound desc = could not find container \"eb21e730cac5b3186a8d8e23e6c401cde044cbe2ba3515cf8dcd58daffcd7521\": container with ID starting with eb21e730cac5b3186a8d8e23e6c401cde044cbe2ba3515cf8dcd58daffcd7521 not found: ID does not exist" Dec 02 19:06:45 crc kubenswrapper[4792]: I1202 19:06:45.171702 4792 scope.go:117] "RemoveContainer" containerID="491f4660400768e7dff990a7d7b55b50ccf0ec508a7c6080b2db22393e20ea41" Dec 02 19:06:45 crc kubenswrapper[4792]: E1202 19:06:45.172388 4792 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"491f4660400768e7dff990a7d7b55b50ccf0ec508a7c6080b2db22393e20ea41\": container with ID starting with 491f4660400768e7dff990a7d7b55b50ccf0ec508a7c6080b2db22393e20ea41 not found: ID does not exist" containerID="491f4660400768e7dff990a7d7b55b50ccf0ec508a7c6080b2db22393e20ea41" Dec 02 19:06:45 crc kubenswrapper[4792]: I1202 19:06:45.172470 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"491f4660400768e7dff990a7d7b55b50ccf0ec508a7c6080b2db22393e20ea41"} err="failed to get container status \"491f4660400768e7dff990a7d7b55b50ccf0ec508a7c6080b2db22393e20ea41\": rpc error: code = NotFound desc = could not find container \"491f4660400768e7dff990a7d7b55b50ccf0ec508a7c6080b2db22393e20ea41\": container with ID starting with 491f4660400768e7dff990a7d7b55b50ccf0ec508a7c6080b2db22393e20ea41 not found: ID does not exist" Dec 02 19:06:45 crc kubenswrapper[4792]: I1202 19:06:45.560185 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56e68866-5666-43fb-996f-81d75af7ca3e" path="/var/lib/kubelet/pods/56e68866-5666-43fb-996f-81d75af7ca3e/volumes" Dec 02 19:06:46 crc kubenswrapper[4792]: I1202 19:06:46.542120 4792 scope.go:117] "RemoveContainer" containerID="1bd3680d8cabbe09313bc8e9bfcfc7ae4c8aef120bcf8851a5536ed5b3144523" Dec 02 19:06:46 crc kubenswrapper[4792]: E1202 19:06:46.542608 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:06:50 crc kubenswrapper[4792]: I1202 19:06:50.120893 4792 generic.go:334] "Generic (PLEG): container finished" podID="e29a810b-8a51-4d2c-ab9e-61315499b272" containerID="fa8662c93df49da6493b15490dedef80df76d083ca291fc327a50cc452f44bd7" exitCode=0 Dec 02 19:06:50 crc kubenswrapper[4792]: I1202 19:06:50.120973 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2bppw" event={"ID":"e29a810b-8a51-4d2c-ab9e-61315499b272","Type":"ContainerDied","Data":"fa8662c93df49da6493b15490dedef80df76d083ca291fc327a50cc452f44bd7"} Dec 02 19:06:51 crc kubenswrapper[4792]: I1202 19:06:51.795654 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2bppw" Dec 02 19:06:51 crc kubenswrapper[4792]: I1202 19:06:51.867469 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e29a810b-8a51-4d2c-ab9e-61315499b272-bootstrap-combined-ca-bundle\") pod \"e29a810b-8a51-4d2c-ab9e-61315499b272\" (UID: \"e29a810b-8a51-4d2c-ab9e-61315499b272\") " Dec 02 19:06:51 crc kubenswrapper[4792]: I1202 19:06:51.867748 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e29a810b-8a51-4d2c-ab9e-61315499b272-ssh-key\") pod \"e29a810b-8a51-4d2c-ab9e-61315499b272\" (UID: \"e29a810b-8a51-4d2c-ab9e-61315499b272\") " Dec 02 19:06:51 crc kubenswrapper[4792]: I1202 19:06:51.867838 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fz4tc\" (UniqueName: \"kubernetes.io/projected/e29a810b-8a51-4d2c-ab9e-61315499b272-kube-api-access-fz4tc\") pod \"e29a810b-8a51-4d2c-ab9e-61315499b272\" (UID: \"e29a810b-8a51-4d2c-ab9e-61315499b272\") " Dec 02 19:06:51 crc kubenswrapper[4792]: I1202 19:06:51.868010 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e29a810b-8a51-4d2c-ab9e-61315499b272-inventory\") pod \"e29a810b-8a51-4d2c-ab9e-61315499b272\" (UID: \"e29a810b-8a51-4d2c-ab9e-61315499b272\") " Dec 02 19:06:51 crc kubenswrapper[4792]: I1202 19:06:51.889807 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e29a810b-8a51-4d2c-ab9e-61315499b272-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "e29a810b-8a51-4d2c-ab9e-61315499b272" (UID: "e29a810b-8a51-4d2c-ab9e-61315499b272"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:06:51 crc kubenswrapper[4792]: I1202 19:06:51.889885 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e29a810b-8a51-4d2c-ab9e-61315499b272-kube-api-access-fz4tc" (OuterVolumeSpecName: "kube-api-access-fz4tc") pod "e29a810b-8a51-4d2c-ab9e-61315499b272" (UID: "e29a810b-8a51-4d2c-ab9e-61315499b272"). InnerVolumeSpecName "kube-api-access-fz4tc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:06:51 crc kubenswrapper[4792]: I1202 19:06:51.904579 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e29a810b-8a51-4d2c-ab9e-61315499b272-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "e29a810b-8a51-4d2c-ab9e-61315499b272" (UID: "e29a810b-8a51-4d2c-ab9e-61315499b272"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:06:51 crc kubenswrapper[4792]: I1202 19:06:51.943190 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e29a810b-8a51-4d2c-ab9e-61315499b272-inventory" (OuterVolumeSpecName: "inventory") pod "e29a810b-8a51-4d2c-ab9e-61315499b272" (UID: "e29a810b-8a51-4d2c-ab9e-61315499b272"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:06:51 crc kubenswrapper[4792]: I1202 19:06:51.970545 4792 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e29a810b-8a51-4d2c-ab9e-61315499b272-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 19:06:51 crc kubenswrapper[4792]: I1202 19:06:51.970591 4792 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e29a810b-8a51-4d2c-ab9e-61315499b272-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 19:06:51 crc kubenswrapper[4792]: I1202 19:06:51.970604 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fz4tc\" (UniqueName: \"kubernetes.io/projected/e29a810b-8a51-4d2c-ab9e-61315499b272-kube-api-access-fz4tc\") on node \"crc\" DevicePath \"\"" Dec 02 19:06:51 crc kubenswrapper[4792]: I1202 19:06:51.970615 4792 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e29a810b-8a51-4d2c-ab9e-61315499b272-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 19:06:52 crc kubenswrapper[4792]: I1202 19:06:52.147046 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2bppw" event={"ID":"e29a810b-8a51-4d2c-ab9e-61315499b272","Type":"ContainerDied","Data":"438a4083ba3a888cf2936a6b451db6719db9f7e5badacc9ad996220efc491142"} Dec 02 19:06:52 crc kubenswrapper[4792]: I1202 19:06:52.147366 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="438a4083ba3a888cf2936a6b451db6719db9f7e5badacc9ad996220efc491142" Dec 02 19:06:52 crc kubenswrapper[4792]: I1202 19:06:52.147168 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2bppw" Dec 02 19:06:52 crc kubenswrapper[4792]: I1202 19:06:52.241598 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7c8wk"] Dec 02 19:06:52 crc kubenswrapper[4792]: E1202 19:06:52.242078 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56e68866-5666-43fb-996f-81d75af7ca3e" containerName="registry-server" Dec 02 19:06:52 crc kubenswrapper[4792]: I1202 19:06:52.242093 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="56e68866-5666-43fb-996f-81d75af7ca3e" containerName="registry-server" Dec 02 19:06:52 crc kubenswrapper[4792]: E1202 19:06:52.242108 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e29a810b-8a51-4d2c-ab9e-61315499b272" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 02 19:06:52 crc kubenswrapper[4792]: I1202 19:06:52.242115 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="e29a810b-8a51-4d2c-ab9e-61315499b272" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 02 19:06:52 crc kubenswrapper[4792]: E1202 19:06:52.242130 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09aad814-ab17-41ad-9489-027379a3479e" containerName="extract-content" Dec 02 19:06:52 crc kubenswrapper[4792]: I1202 19:06:52.242136 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="09aad814-ab17-41ad-9489-027379a3479e" containerName="extract-content" Dec 02 19:06:52 crc kubenswrapper[4792]: E1202 19:06:52.242148 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56e68866-5666-43fb-996f-81d75af7ca3e" containerName="extract-utilities" Dec 02 19:06:52 crc kubenswrapper[4792]: I1202 19:06:52.242155 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="56e68866-5666-43fb-996f-81d75af7ca3e" containerName="extract-utilities" Dec 02 19:06:52 crc kubenswrapper[4792]: E1202 19:06:52.242165 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09aad814-ab17-41ad-9489-027379a3479e" containerName="extract-utilities" Dec 02 19:06:52 crc kubenswrapper[4792]: I1202 19:06:52.242170 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="09aad814-ab17-41ad-9489-027379a3479e" containerName="extract-utilities" Dec 02 19:06:52 crc kubenswrapper[4792]: E1202 19:06:52.242186 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09aad814-ab17-41ad-9489-027379a3479e" containerName="registry-server" Dec 02 19:06:52 crc kubenswrapper[4792]: I1202 19:06:52.242192 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="09aad814-ab17-41ad-9489-027379a3479e" containerName="registry-server" Dec 02 19:06:52 crc kubenswrapper[4792]: E1202 19:06:52.242213 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56e68866-5666-43fb-996f-81d75af7ca3e" containerName="extract-content" Dec 02 19:06:52 crc kubenswrapper[4792]: I1202 19:06:52.242219 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="56e68866-5666-43fb-996f-81d75af7ca3e" containerName="extract-content" Dec 02 19:06:52 crc kubenswrapper[4792]: I1202 19:06:52.242428 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="56e68866-5666-43fb-996f-81d75af7ca3e" containerName="registry-server" Dec 02 19:06:52 crc kubenswrapper[4792]: I1202 19:06:52.242439 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="09aad814-ab17-41ad-9489-027379a3479e" containerName="registry-server" Dec 02 19:06:52 crc 
kubenswrapper[4792]: I1202 19:06:52.242456 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="e29a810b-8a51-4d2c-ab9e-61315499b272" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 02 19:06:52 crc kubenswrapper[4792]: I1202 19:06:52.243219 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7c8wk" Dec 02 19:06:52 crc kubenswrapper[4792]: I1202 19:06:52.246132 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 19:06:52 crc kubenswrapper[4792]: I1202 19:06:52.246179 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 19:06:52 crc kubenswrapper[4792]: I1202 19:06:52.246250 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zln7c" Dec 02 19:06:52 crc kubenswrapper[4792]: I1202 19:06:52.246290 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 19:06:52 crc kubenswrapper[4792]: I1202 19:06:52.262269 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7c8wk"] Dec 02 19:06:52 crc kubenswrapper[4792]: I1202 19:06:52.277356 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6263b59c-7edd-49eb-aac3-42fd1c5da951-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-7c8wk\" (UID: \"6263b59c-7edd-49eb-aac3-42fd1c5da951\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7c8wk" Dec 02 19:06:52 crc kubenswrapper[4792]: I1202 19:06:52.277434 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-99dbs\" (UniqueName: \"kubernetes.io/projected/6263b59c-7edd-49eb-aac3-42fd1c5da951-kube-api-access-99dbs\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-7c8wk\" (UID: \"6263b59c-7edd-49eb-aac3-42fd1c5da951\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7c8wk" Dec 02 19:06:52 crc kubenswrapper[4792]: I1202 19:06:52.277501 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6263b59c-7edd-49eb-aac3-42fd1c5da951-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-7c8wk\" (UID: \"6263b59c-7edd-49eb-aac3-42fd1c5da951\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7c8wk" Dec 02 19:06:52 crc kubenswrapper[4792]: I1202 19:06:52.380291 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-99dbs\" (UniqueName: \"kubernetes.io/projected/6263b59c-7edd-49eb-aac3-42fd1c5da951-kube-api-access-99dbs\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-7c8wk\" (UID: \"6263b59c-7edd-49eb-aac3-42fd1c5da951\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7c8wk" Dec 02 19:06:52 crc kubenswrapper[4792]: I1202 19:06:52.380466 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6263b59c-7edd-49eb-aac3-42fd1c5da951-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-7c8wk\" (UID: \"6263b59c-7edd-49eb-aac3-42fd1c5da951\") " 
pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7c8wk" Dec 02 19:06:52 crc kubenswrapper[4792]: I1202 19:06:52.380683 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6263b59c-7edd-49eb-aac3-42fd1c5da951-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-7c8wk\" (UID: \"6263b59c-7edd-49eb-aac3-42fd1c5da951\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7c8wk" Dec 02 19:06:52 crc kubenswrapper[4792]: I1202 19:06:52.384820 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6263b59c-7edd-49eb-aac3-42fd1c5da951-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-7c8wk\" (UID: \"6263b59c-7edd-49eb-aac3-42fd1c5da951\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7c8wk" Dec 02 19:06:52 crc kubenswrapper[4792]: I1202 19:06:52.384832 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6263b59c-7edd-49eb-aac3-42fd1c5da951-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-7c8wk\" (UID: \"6263b59c-7edd-49eb-aac3-42fd1c5da951\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7c8wk" Dec 02 19:06:52 crc kubenswrapper[4792]: I1202 19:06:52.407236 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-99dbs\" (UniqueName: \"kubernetes.io/projected/6263b59c-7edd-49eb-aac3-42fd1c5da951-kube-api-access-99dbs\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-7c8wk\" (UID: \"6263b59c-7edd-49eb-aac3-42fd1c5da951\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7c8wk" Dec 02 19:06:52 crc kubenswrapper[4792]: I1202 19:06:52.569689 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7c8wk" Dec 02 19:06:53 crc kubenswrapper[4792]: I1202 19:06:53.210479 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7c8wk"] Dec 02 19:06:54 crc kubenswrapper[4792]: I1202 19:06:54.170907 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7c8wk" event={"ID":"6263b59c-7edd-49eb-aac3-42fd1c5da951","Type":"ContainerStarted","Data":"d2e8a2ebefb53f4e8ef7d195b670e39bda2d8577f0392c6d4c371ac146ca63a3"} Dec 02 19:06:54 crc kubenswrapper[4792]: I1202 19:06:54.171258 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7c8wk" event={"ID":"6263b59c-7edd-49eb-aac3-42fd1c5da951","Type":"ContainerStarted","Data":"8bb4882b189a836938c0f08b6ee073d04f3865d275ee261d787673b514de03ab"} Dec 02 19:06:54 crc kubenswrapper[4792]: I1202 19:06:54.199603 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7c8wk" podStartSLOduration=1.7015324170000001 podStartE2EDuration="2.199581278s" podCreationTimestamp="2025-12-02 19:06:52 +0000 UTC" firstStartedPulling="2025-12-02 19:06:53.211416613 +0000 UTC m=+1843.984308951" lastFinishedPulling="2025-12-02 19:06:53.709465474 +0000 UTC m=+1844.482357812" observedRunningTime="2025-12-02 19:06:54.195101901 +0000 UTC m=+1844.967994259" watchObservedRunningTime="2025-12-02 19:06:54.199581278 +0000 UTC m=+1844.972473616" Dec 02 19:06:57 crc kubenswrapper[4792]: I1202 19:06:57.540779 4792 scope.go:117] "RemoveContainer" containerID="1bd3680d8cabbe09313bc8e9bfcfc7ae4c8aef120bcf8851a5536ed5b3144523" Dec 02 19:06:57 crc kubenswrapper[4792]: E1202 19:06:57.541891 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:07:11 crc kubenswrapper[4792]: I1202 19:07:11.540281 4792 scope.go:117] "RemoveContainer" containerID="1bd3680d8cabbe09313bc8e9bfcfc7ae4c8aef120bcf8851a5536ed5b3144523" Dec 02 19:07:12 crc kubenswrapper[4792]: I1202 19:07:12.455222 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" event={"ID":"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f","Type":"ContainerStarted","Data":"11788464675fa1d7887d1f27536456407985fbf82e43eb7551c12897d3a408e0"} Dec 02 19:07:16 crc kubenswrapper[4792]: I1202 19:07:16.046610 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-vh4wp"] Dec 02 19:07:16 crc kubenswrapper[4792]: I1202 19:07:16.056055 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-93d3-account-create-update-ptvkf"] Dec 02 19:07:16 crc kubenswrapper[4792]: I1202 19:07:16.064786 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-93d3-account-create-update-ptvkf"] Dec 02 19:07:16 crc kubenswrapper[4792]: I1202 19:07:16.077594 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-vh4wp"] Dec 02 19:07:17 crc kubenswrapper[4792]: I1202 
19:07:17.050584 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-3cdb-account-create-update-p2q9g"] Dec 02 19:07:17 crc kubenswrapper[4792]: I1202 19:07:17.072169 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-64h7l"] Dec 02 19:07:17 crc kubenswrapper[4792]: I1202 19:07:17.083670 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-gpxp2"] Dec 02 19:07:17 crc kubenswrapper[4792]: I1202 19:07:17.093065 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-9346-account-create-update-4t87k"] Dec 02 19:07:17 crc kubenswrapper[4792]: I1202 19:07:17.102769 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-64h7l"] Dec 02 19:07:17 crc kubenswrapper[4792]: I1202 19:07:17.112838 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-3cdb-account-create-update-p2q9g"] Dec 02 19:07:17 crc kubenswrapper[4792]: I1202 19:07:17.122752 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-gpxp2"] Dec 02 19:07:17 crc kubenswrapper[4792]: I1202 19:07:17.132220 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-9346-account-create-update-4t87k"] Dec 02 19:07:17 crc kubenswrapper[4792]: I1202 19:07:17.564698 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6d35c039-6dbb-4fb5-b02e-9d8453c72dd8" path="/var/lib/kubelet/pods/6d35c039-6dbb-4fb5-b02e-9d8453c72dd8/volumes" Dec 02 19:07:17 crc kubenswrapper[4792]: I1202 19:07:17.567220 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad0e58c2-6db5-439e-897c-89de96dfe2f2" path="/var/lib/kubelet/pods/ad0e58c2-6db5-439e-897c-89de96dfe2f2/volumes" Dec 02 19:07:17 crc kubenswrapper[4792]: I1202 19:07:17.569484 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2d3693c-b058-4505-a62c-7cf9e374d3d7" path="/var/lib/kubelet/pods/c2d3693c-b058-4505-a62c-7cf9e374d3d7/volumes" Dec 02 19:07:17 crc kubenswrapper[4792]: I1202 19:07:17.571922 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c989d282-ef5d-4644-8610-8207aae7d341" path="/var/lib/kubelet/pods/c989d282-ef5d-4644-8610-8207aae7d341/volumes" Dec 02 19:07:17 crc kubenswrapper[4792]: I1202 19:07:17.574671 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e62e5f1d-017c-4f71-9cd5-b02bd55a7138" path="/var/lib/kubelet/pods/e62e5f1d-017c-4f71-9cd5-b02bd55a7138/volumes" Dec 02 19:07:17 crc kubenswrapper[4792]: I1202 19:07:17.576317 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9cfe4ef-8938-431b-8489-9548343fec57" path="/var/lib/kubelet/pods/f9cfe4ef-8938-431b-8489-9548343fec57/volumes" Dec 02 19:07:35 crc kubenswrapper[4792]: I1202 19:07:35.111786 4792 scope.go:117] "RemoveContainer" containerID="a551b2c7ac84b339341ea335745d4a67bade2c1ba3bec51228c0a8dc47d67a3c" Dec 02 19:07:35 crc kubenswrapper[4792]: I1202 19:07:35.188747 4792 scope.go:117] "RemoveContainer" containerID="ce372248cbea7bbf9d4d6e3b4a0539b4c6f6eb7bd6a57274f8e66f2f3ffb4499" Dec 02 19:07:35 crc kubenswrapper[4792]: I1202 19:07:35.230265 4792 scope.go:117] "RemoveContainer" containerID="d0c0c9a3d10fb01f5e05c9012c476fb9cb057b865a2cff3a4895577d9588dccd" Dec 02 19:07:35 crc kubenswrapper[4792]: I1202 19:07:35.265717 4792 scope.go:117] "RemoveContainer" containerID="ed0daa3cb029ad38ac04f7c041663a931eccfe94b40feccc219e4774246baab3" Dec 02 19:07:35 crc kubenswrapper[4792]: 
I1202 19:07:35.317378 4792 scope.go:117] "RemoveContainer" containerID="4e8ab10fa2e77565e275eca732d628d40e006b77ebb16c9ccd8499db7eaf40da"
Dec 02 19:07:35 crc kubenswrapper[4792]: I1202 19:07:35.403636 4792 scope.go:117] "RemoveContainer" containerID="c32063f114b0587b4fa057851c5bba268a42615c302c85a5776f23ac21374016"
Dec 02 19:07:47 crc kubenswrapper[4792]: I1202 19:07:47.078770 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-ebea-account-create-update-fq5wk"]
Dec 02 19:07:47 crc kubenswrapper[4792]: I1202 19:07:47.093600 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-dsqd2"]
Dec 02 19:07:47 crc kubenswrapper[4792]: I1202 19:07:47.106638 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-8a45-account-create-update-rvtjj"]
Dec 02 19:07:47 crc kubenswrapper[4792]: I1202 19:07:47.117861 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-dsqd2"]
Dec 02 19:07:47 crc kubenswrapper[4792]: I1202 19:07:47.127087 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-ebea-account-create-update-fq5wk"]
Dec 02 19:07:47 crc kubenswrapper[4792]: I1202 19:07:47.134588 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-87a8-account-create-update-fbvt2"]
Dec 02 19:07:47 crc kubenswrapper[4792]: I1202 19:07:47.142347 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-26zbd"]
Dec 02 19:07:47 crc kubenswrapper[4792]: I1202 19:07:47.150253 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-db-create-h2qp6"]
Dec 02 19:07:47 crc kubenswrapper[4792]: I1202 19:07:47.158180 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-wh2n2"]
Dec 02 19:07:47 crc kubenswrapper[4792]: I1202 19:07:47.165924 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-1802-account-create-update-tfzgc"]
Dec 02 19:07:47 crc kubenswrapper[4792]: I1202 19:07:47.173339 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-4svmb"]
Dec 02 19:07:47 crc kubenswrapper[4792]: I1202 19:07:47.180766 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-26zbd"]
Dec 02 19:07:47 crc kubenswrapper[4792]: I1202 19:07:47.188121 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-87a8-account-create-update-fbvt2"]
Dec 02 19:07:47 crc kubenswrapper[4792]: I1202 19:07:47.195880 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-8a45-account-create-update-rvtjj"]
Dec 02 19:07:47 crc kubenswrapper[4792]: I1202 19:07:47.203185 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-4svmb"]
Dec 02 19:07:47 crc kubenswrapper[4792]: I1202 19:07:47.210220 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-wh2n2"]
Dec 02 19:07:47 crc kubenswrapper[4792]: I1202 19:07:47.217389 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-db-create-h2qp6"]
Dec 02 19:07:47 crc kubenswrapper[4792]: I1202 19:07:47.224512 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-1802-account-create-update-tfzgc"]
Dec 02 19:07:47 crc kubenswrapper[4792]: I1202 19:07:47.561079 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0c65caa0-11c0-4a4d-b58d-cb17efd01928" path="/var/lib/kubelet/pods/0c65caa0-11c0-4a4d-b58d-cb17efd01928/volumes"
Dec 02 19:07:47 crc kubenswrapper[4792]: I1202 19:07:47.562286 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="17f2eea9-c362-4195-a8fc-8d392d045f4f" path="/var/lib/kubelet/pods/17f2eea9-c362-4195-a8fc-8d392d045f4f/volumes"
Dec 02 19:07:47 crc kubenswrapper[4792]: I1202 19:07:47.563428 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="366ad881-59b1-434f-b500-3cb185421ebe" path="/var/lib/kubelet/pods/366ad881-59b1-434f-b500-3cb185421ebe/volumes"
Dec 02 19:07:47 crc kubenswrapper[4792]: I1202 19:07:47.565078 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39d51baf-c92a-413f-9257-facc87ce7084" path="/var/lib/kubelet/pods/39d51baf-c92a-413f-9257-facc87ce7084/volumes"
Dec 02 19:07:47 crc kubenswrapper[4792]: I1202 19:07:47.567406 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ac4b779-c2dd-4da8-a15d-e3d8ad165510" path="/var/lib/kubelet/pods/5ac4b779-c2dd-4da8-a15d-e3d8ad165510/volumes"
Dec 02 19:07:47 crc kubenswrapper[4792]: I1202 19:07:47.568804 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6acb750d-2221-421a-af1c-dfe569427350" path="/var/lib/kubelet/pods/6acb750d-2221-421a-af1c-dfe569427350/volumes"
Dec 02 19:07:47 crc kubenswrapper[4792]: I1202 19:07:47.569860 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96ac9638-e157-43ba-b12c-96b502226293" path="/var/lib/kubelet/pods/96ac9638-e157-43ba-b12c-96b502226293/volumes"
Dec 02 19:07:47 crc kubenswrapper[4792]: I1202 19:07:47.571288 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="98cbdc65-cc24-4a81-899b-66de1d1a6ca3" path="/var/lib/kubelet/pods/98cbdc65-cc24-4a81-899b-66de1d1a6ca3/volumes"
Dec 02 19:07:47 crc kubenswrapper[4792]: I1202 19:07:47.572342 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f80d3801-ef59-4680-9736-dc6d78d1b7d8" path="/var/lib/kubelet/pods/f80d3801-ef59-4680-9736-dc6d78d1b7d8/volumes"
Dec 02 19:07:52 crc kubenswrapper[4792]: I1202 19:07:52.032343 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-jj9jh"]
Dec 02 19:07:52 crc kubenswrapper[4792]: I1202 19:07:52.052570 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-jj9jh"]
Dec 02 19:07:53 crc kubenswrapper[4792]: I1202 19:07:53.550653 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a662d52b-8774-40a9-b965-ca41d5f1c6c4" path="/var/lib/kubelet/pods/a662d52b-8774-40a9-b965-ca41d5f1c6c4/volumes"
Dec 02 19:08:25 crc kubenswrapper[4792]: I1202 19:08:25.059638 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-zmnrf"]
Dec 02 19:08:25 crc kubenswrapper[4792]: I1202 19:08:25.067910 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-zmnrf"]
Dec 02 19:08:25 crc kubenswrapper[4792]: I1202 19:08:25.562164 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13ec79b9-9006-473e-a8c9-e0cc9069d983" path="/var/lib/kubelet/pods/13ec79b9-9006-473e-a8c9-e0cc9069d983/volumes"
Dec 02 19:08:35 crc kubenswrapper[4792]: I1202 19:08:35.691330 4792 scope.go:117] "RemoveContainer" containerID="179f3fc6055f21eed1f667d3cc74da7dbe9bd304358fcb7f4fb9575906985440"
Dec 02 19:08:35 crc kubenswrapper[4792]: I1202 19:08:35.761204 4792 scope.go:117] "RemoveContainer" containerID="2345285abe3985cfea4ef46be28ca339b0996bf16b15db21a8d1c12e43c278b8"
Dec 02 19:08:35 crc kubenswrapper[4792]: I1202 19:08:35.826226 4792 scope.go:117] "RemoveContainer" containerID="2b93d2eca1ab24fabcc6fd23b34db4f46d3f9d28d326481b9014b1ca3d52056c"
Dec 02 19:08:35 crc kubenswrapper[4792]: I1202 19:08:35.870795 4792 scope.go:117] "RemoveContainer" containerID="ab2b5633405cead41e2b55d9f149b430f0281e4ed51a6b54c58884baa8ffadc5"
Dec 02 19:08:35 crc kubenswrapper[4792]: I1202 19:08:35.904074 4792 scope.go:117] "RemoveContainer" containerID="3256698347e69cbc8074b6f33febc449877278b05f18e0bc6b4ee7599aba0d70"
Dec 02 19:08:35 crc kubenswrapper[4792]: I1202 19:08:35.935450 4792 scope.go:117] "RemoveContainer" containerID="e13249fc8d8ebedf700437f85ac25bdd37d4993ffeddb7cb15cb6969849a90a3"
Dec 02 19:08:35 crc kubenswrapper[4792]: I1202 19:08:35.988333 4792 scope.go:117] "RemoveContainer" containerID="a57f2be1b93544777aba8e2b1c6ff1a1ab422cb15451bc5e1c51aabbac78894d"
Dec 02 19:08:36 crc kubenswrapper[4792]: I1202 19:08:36.020448 4792 scope.go:117] "RemoveContainer" containerID="971f40e79711af1836c9a7d7182b7f0b4e134e768c6f4d8c65b27d9c1bb80c7e"
Dec 02 19:08:36 crc kubenswrapper[4792]: I1202 19:08:36.043129 4792 scope.go:117] "RemoveContainer" containerID="d1f9423d663b6e88b27e567000497de0316460778ab9ffbfa868db89f6cceba0"
Dec 02 19:08:36 crc kubenswrapper[4792]: I1202 19:08:36.068713 4792 scope.go:117] "RemoveContainer" containerID="5e1cfb116ed3cac0798487d859c3c197e31c43a358a6c47449b7a6cd073e7a0f"
Dec 02 19:08:36 crc kubenswrapper[4792]: I1202 19:08:36.091002 4792 scope.go:117] "RemoveContainer" containerID="06d4b02acfe1120ad569c8d49725276df8e5f5458989c1b57b84bade5c8f3d65"
Dec 02 19:08:36 crc kubenswrapper[4792]: I1202 19:08:36.114548 4792 scope.go:117] "RemoveContainer" containerID="88e22d4562a0ec990d2aca3a63183106a1dda487d925ddf9ef0a75c735e1e6a6"
Dec 02 19:08:36 crc kubenswrapper[4792]: I1202 19:08:36.159356 4792 scope.go:117] "RemoveContainer" containerID="adc67c19fa767df57299a569e428f3fbccf0407769242da70dc7bcb7f38acb78"
Dec 02 19:08:43 crc kubenswrapper[4792]: I1202 19:08:43.066586 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-dc5j7"]
Dec 02 19:08:43 crc kubenswrapper[4792]: I1202 19:08:43.085660 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-587gx"]
Dec 02 19:08:43 crc kubenswrapper[4792]: I1202 19:08:43.094420 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-587gx"]
Dec 02 19:08:43 crc kubenswrapper[4792]: I1202 19:08:43.121971 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-dc5j7"]
Dec 02 19:08:43 crc kubenswrapper[4792]: I1202 19:08:43.138578 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-455zq"]
Dec 02 19:08:43 crc kubenswrapper[4792]: I1202 19:08:43.149292 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-455zq"]
Dec 02 19:08:43 crc kubenswrapper[4792]: I1202 19:08:43.572892 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="477f2280-d198-47f0-8f7a-cd76106d9f35" path="/var/lib/kubelet/pods/477f2280-d198-47f0-8f7a-cd76106d9f35/volumes"
Dec 02 19:08:43 crc kubenswrapper[4792]: I1202 19:08:43.574673 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="617ad28d-5e59-4407-91e7-740824d3ce43" path="/var/lib/kubelet/pods/617ad28d-5e59-4407-91e7-740824d3ce43/volumes"
Dec 02 19:08:43 crc kubenswrapper[4792]: I1202 19:08:43.575852 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df41e02b-ae80-4435-bcd8-df8b9549e73d" path="/var/lib/kubelet/pods/df41e02b-ae80-4435-bcd8-df8b9549e73d/volumes"
Dec 02 19:08:56 crc kubenswrapper[4792]: I1202 19:08:56.036976 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-t5xgj"]
Dec 02 19:08:56 crc kubenswrapper[4792]: I1202 19:08:56.047750 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-t5xgj"]
Dec 02 19:08:56 crc kubenswrapper[4792]: I1202 19:08:56.778066 4792 generic.go:334] "Generic (PLEG): container finished" podID="6263b59c-7edd-49eb-aac3-42fd1c5da951" containerID="d2e8a2ebefb53f4e8ef7d195b670e39bda2d8577f0392c6d4c371ac146ca63a3" exitCode=0
Dec 02 19:08:56 crc kubenswrapper[4792]: I1202 19:08:56.778272 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7c8wk" event={"ID":"6263b59c-7edd-49eb-aac3-42fd1c5da951","Type":"ContainerDied","Data":"d2e8a2ebefb53f4e8ef7d195b670e39bda2d8577f0392c6d4c371ac146ca63a3"}
Dec 02 19:08:57 crc kubenswrapper[4792]: I1202 19:08:57.552674 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="50f2a0fa-fcf2-4f6c-be51-b78ae811fce5" path="/var/lib/kubelet/pods/50f2a0fa-fcf2-4f6c-be51-b78ae811fce5/volumes"
Dec 02 19:08:58 crc kubenswrapper[4792]: I1202 19:08:58.375067 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7c8wk"
Dec 02 19:08:58 crc kubenswrapper[4792]: I1202 19:08:58.467486 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6263b59c-7edd-49eb-aac3-42fd1c5da951-inventory\") pod \"6263b59c-7edd-49eb-aac3-42fd1c5da951\" (UID: \"6263b59c-7edd-49eb-aac3-42fd1c5da951\") "
Dec 02 19:08:58 crc kubenswrapper[4792]: I1202 19:08:58.467604 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6263b59c-7edd-49eb-aac3-42fd1c5da951-ssh-key\") pod \"6263b59c-7edd-49eb-aac3-42fd1c5da951\" (UID: \"6263b59c-7edd-49eb-aac3-42fd1c5da951\") "
Dec 02 19:08:58 crc kubenswrapper[4792]: I1202 19:08:58.467673 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-99dbs\" (UniqueName: \"kubernetes.io/projected/6263b59c-7edd-49eb-aac3-42fd1c5da951-kube-api-access-99dbs\") pod \"6263b59c-7edd-49eb-aac3-42fd1c5da951\" (UID: \"6263b59c-7edd-49eb-aac3-42fd1c5da951\") "
Dec 02 19:08:58 crc kubenswrapper[4792]: I1202 19:08:58.484837 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6263b59c-7edd-49eb-aac3-42fd1c5da951-kube-api-access-99dbs" (OuterVolumeSpecName: "kube-api-access-99dbs") pod "6263b59c-7edd-49eb-aac3-42fd1c5da951" (UID: "6263b59c-7edd-49eb-aac3-42fd1c5da951"). InnerVolumeSpecName "kube-api-access-99dbs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 19:08:58 crc kubenswrapper[4792]: I1202 19:08:58.509347 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6263b59c-7edd-49eb-aac3-42fd1c5da951-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "6263b59c-7edd-49eb-aac3-42fd1c5da951" (UID: "6263b59c-7edd-49eb-aac3-42fd1c5da951"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 19:08:58 crc kubenswrapper[4792]: I1202 19:08:58.526261 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6263b59c-7edd-49eb-aac3-42fd1c5da951-inventory" (OuterVolumeSpecName: "inventory") pod "6263b59c-7edd-49eb-aac3-42fd1c5da951" (UID: "6263b59c-7edd-49eb-aac3-42fd1c5da951"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 19:08:58 crc kubenswrapper[4792]: I1202 19:08:58.570076 4792 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6263b59c-7edd-49eb-aac3-42fd1c5da951-inventory\") on node \"crc\" DevicePath \"\""
Dec 02 19:08:58 crc kubenswrapper[4792]: I1202 19:08:58.570116 4792 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6263b59c-7edd-49eb-aac3-42fd1c5da951-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 02 19:08:58 crc kubenswrapper[4792]: I1202 19:08:58.570130 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-99dbs\" (UniqueName: \"kubernetes.io/projected/6263b59c-7edd-49eb-aac3-42fd1c5da951-kube-api-access-99dbs\") on node \"crc\" DevicePath \"\""
Dec 02 19:08:58 crc kubenswrapper[4792]: I1202 19:08:58.808997 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7c8wk" event={"ID":"6263b59c-7edd-49eb-aac3-42fd1c5da951","Type":"ContainerDied","Data":"8bb4882b189a836938c0f08b6ee073d04f3865d275ee261d787673b514de03ab"}
Dec 02 19:08:58 crc kubenswrapper[4792]: I1202 19:08:58.809080 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8bb4882b189a836938c0f08b6ee073d04f3865d275ee261d787673b514de03ab"
Dec 02 19:08:58 crc kubenswrapper[4792]: I1202 19:08:58.809170 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7c8wk"
Dec 02 19:08:58 crc kubenswrapper[4792]: I1202 19:08:58.919806 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-sgg5b"]
Dec 02 19:08:58 crc kubenswrapper[4792]: E1202 19:08:58.920403 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6263b59c-7edd-49eb-aac3-42fd1c5da951" containerName="download-cache-edpm-deployment-openstack-edpm-ipam"
Dec 02 19:08:58 crc kubenswrapper[4792]: I1202 19:08:58.920427 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="6263b59c-7edd-49eb-aac3-42fd1c5da951" containerName="download-cache-edpm-deployment-openstack-edpm-ipam"
Dec 02 19:08:58 crc kubenswrapper[4792]: I1202 19:08:58.920749 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="6263b59c-7edd-49eb-aac3-42fd1c5da951" containerName="download-cache-edpm-deployment-openstack-edpm-ipam"
Dec 02 19:08:58 crc kubenswrapper[4792]: I1202 19:08:58.921799 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-sgg5b"
Dec 02 19:08:58 crc kubenswrapper[4792]: I1202 19:08:58.925462 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 02 19:08:58 crc kubenswrapper[4792]: I1202 19:08:58.925635 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 02 19:08:58 crc kubenswrapper[4792]: I1202 19:08:58.925779 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 02 19:08:58 crc kubenswrapper[4792]: I1202 19:08:58.926042 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zln7c"
Dec 02 19:08:58 crc kubenswrapper[4792]: I1202 19:08:58.929288 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-sgg5b"]
Dec 02 19:08:58 crc kubenswrapper[4792]: I1202 19:08:58.977468 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pc985\" (UniqueName: \"kubernetes.io/projected/e3237d97-ad57-4313-8210-fa48b0740a3c-kube-api-access-pc985\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-sgg5b\" (UID: \"e3237d97-ad57-4313-8210-fa48b0740a3c\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-sgg5b"
Dec 02 19:08:58 crc kubenswrapper[4792]: I1202 19:08:58.978030 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e3237d97-ad57-4313-8210-fa48b0740a3c-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-sgg5b\" (UID: \"e3237d97-ad57-4313-8210-fa48b0740a3c\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-sgg5b"
Dec 02 19:08:58 crc kubenswrapper[4792]: I1202 19:08:58.978103 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e3237d97-ad57-4313-8210-fa48b0740a3c-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-sgg5b\" (UID: \"e3237d97-ad57-4313-8210-fa48b0740a3c\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-sgg5b"
Dec 02 19:08:59 crc kubenswrapper[4792]: I1202 19:08:59.080077 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pc985\" (UniqueName: \"kubernetes.io/projected/e3237d97-ad57-4313-8210-fa48b0740a3c-kube-api-access-pc985\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-sgg5b\" (UID: \"e3237d97-ad57-4313-8210-fa48b0740a3c\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-sgg5b"
Dec 02 19:08:59 crc kubenswrapper[4792]: I1202 19:08:59.080210 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e3237d97-ad57-4313-8210-fa48b0740a3c-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-sgg5b\" (UID: \"e3237d97-ad57-4313-8210-fa48b0740a3c\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-sgg5b"
Dec 02 19:08:59 crc kubenswrapper[4792]: I1202 19:08:59.080286 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e3237d97-ad57-4313-8210-fa48b0740a3c-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-sgg5b\" (UID: \"e3237d97-ad57-4313-8210-fa48b0740a3c\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-sgg5b"
Dec 02 19:08:59 crc kubenswrapper[4792]: I1202 19:08:59.085166 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e3237d97-ad57-4313-8210-fa48b0740a3c-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-sgg5b\" (UID: \"e3237d97-ad57-4313-8210-fa48b0740a3c\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-sgg5b"
Dec 02 19:08:59 crc kubenswrapper[4792]: I1202 19:08:59.086085 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e3237d97-ad57-4313-8210-fa48b0740a3c-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-sgg5b\" (UID: \"e3237d97-ad57-4313-8210-fa48b0740a3c\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-sgg5b"
Dec 02 19:08:59 crc kubenswrapper[4792]: I1202 19:08:59.098035 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pc985\" (UniqueName: \"kubernetes.io/projected/e3237d97-ad57-4313-8210-fa48b0740a3c-kube-api-access-pc985\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-sgg5b\" (UID: \"e3237d97-ad57-4313-8210-fa48b0740a3c\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-sgg5b"
Dec 02 19:08:59 crc kubenswrapper[4792]: I1202 19:08:59.241156 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-sgg5b"
Dec 02 19:08:59 crc kubenswrapper[4792]: I1202 19:08:59.921577 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-sgg5b"]
Dec 02 19:08:59 crc kubenswrapper[4792]: I1202 19:08:59.925590 4792 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 02 19:09:00 crc kubenswrapper[4792]: I1202 19:09:00.832929 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-sgg5b" event={"ID":"e3237d97-ad57-4313-8210-fa48b0740a3c","Type":"ContainerStarted","Data":"11232decf9d84cceb222e94ef285620ab37f3b6cd680ffb3033ad7483582c652"}
Dec 02 19:09:00 crc kubenswrapper[4792]: I1202 19:09:00.833396 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-sgg5b" event={"ID":"e3237d97-ad57-4313-8210-fa48b0740a3c","Type":"ContainerStarted","Data":"db7661810a3f145fed82ce37bf4db2ca31bdf06116f19d5c9b183825225042a8"}
Dec 02 19:09:00 crc kubenswrapper[4792]: I1202 19:09:00.866183 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-sgg5b" podStartSLOduration=2.273466401 podStartE2EDuration="2.866162713s" podCreationTimestamp="2025-12-02 19:08:58 +0000 UTC" firstStartedPulling="2025-12-02 19:08:59.925369067 +0000 UTC m=+1970.698261395" lastFinishedPulling="2025-12-02 19:09:00.518065359 +0000 UTC m=+1971.290957707" observedRunningTime="2025-12-02 19:09:00.853449991 +0000 UTC m=+1971.626342319" watchObservedRunningTime="2025-12-02 19:09:00.866162713 +0000 UTC m=+1971.639055041"
Dec 02 19:09:35 crc kubenswrapper[4792]: I1202 19:09:35.048911 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-ccnq7"]
Dec 02 19:09:35 crc kubenswrapper[4792]: I1202 19:09:35.066177 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-ccnq7"]
Dec 02 19:09:35 crc kubenswrapper[4792]: I1202 19:09:35.560728 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2675e4dc-601e-4d2f-9fe9-db69ca73b109" path="/var/lib/kubelet/pods/2675e4dc-601e-4d2f-9fe9-db69ca73b109/volumes"
Dec 02 19:09:36 crc kubenswrapper[4792]: I1202 19:09:36.066131 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-lh6kq"]
Dec 02 19:09:36 crc kubenswrapper[4792]: I1202 19:09:36.085909 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-lh6kq"]
Dec 02 19:09:36 crc kubenswrapper[4792]: I1202 19:09:36.416311 4792 scope.go:117] "RemoveContainer" containerID="a221f5350151ff73453af64669d33484cd1eed2a9842abb2fa574e9db8014099"
Dec 02 19:09:36 crc kubenswrapper[4792]: I1202 19:09:36.495113 4792 scope.go:117] "RemoveContainer" containerID="274afdc421cc4391c06ee206f88586ecc3083970599609f2ee9dbf95c56f893b"
Dec 02 19:09:36 crc kubenswrapper[4792]: I1202 19:09:36.544201 4792 scope.go:117] "RemoveContainer" containerID="c54b987c8c922ff0e0f587c87930474283cf03e99ed0e59de82bbccb35f85fed"
Dec 02 19:09:36 crc kubenswrapper[4792]: I1202 19:09:36.601082 4792 scope.go:117] "RemoveContainer" containerID="44560b614e0659c0a75630ffb0ac7c21ecd415bd18f4831b5ac1023d88282289"
Dec 02 19:09:36 crc kubenswrapper[4792]: I1202 19:09:36.658873 4792 scope.go:117] "RemoveContainer" containerID="97a281cfc08d61b59207510a6ebdd6a9f22255a670c1c310d234606d7e3a07cf"
Dec 02 19:09:37 crc kubenswrapper[4792]: I1202 19:09:37.046433 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-1663-account-create-update-8xcft"]
Dec 02 19:09:37 crc kubenswrapper[4792]: I1202 19:09:37.059087 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-sbnrq"]
Dec 02 19:09:37 crc kubenswrapper[4792]: I1202 19:09:37.070171 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-4ebd-account-create-update-qqlvf"]
Dec 02 19:09:37 crc kubenswrapper[4792]: I1202 19:09:37.080206 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-426b-account-create-update-7chxn"]
Dec 02 19:09:37 crc kubenswrapper[4792]: I1202 19:09:37.088355 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-1663-account-create-update-8xcft"]
Dec 02 19:09:37 crc kubenswrapper[4792]: I1202 19:09:37.096905 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-4ebd-account-create-update-qqlvf"]
Dec 02 19:09:37 crc kubenswrapper[4792]: I1202 19:09:37.104680 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-426b-account-create-update-7chxn"]
Dec 02 19:09:37 crc kubenswrapper[4792]: I1202 19:09:37.112895 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-sbnrq"]
Dec 02 19:09:37 crc kubenswrapper[4792]: I1202 19:09:37.558326 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="68e98332-53c4-4f16-85cb-608e7b01e41b" path="/var/lib/kubelet/pods/68e98332-53c4-4f16-85cb-608e7b01e41b/volumes"
Dec 02 19:09:37 crc kubenswrapper[4792]: I1202 19:09:37.559174 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="80838be7-b865-482d-a009-3338e3328a3d" path="/var/lib/kubelet/pods/80838be7-b865-482d-a009-3338e3328a3d/volumes"
Dec 02 19:09:37 crc kubenswrapper[4792]: I1202 19:09:37.561589 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84ebdf59-d7dc-4bcd-ace8-76dadb52d06a" path="/var/lib/kubelet/pods/84ebdf59-d7dc-4bcd-ace8-76dadb52d06a/volumes"
Dec 02 19:09:37 crc kubenswrapper[4792]: I1202 19:09:37.562307 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85b7260d-e208-454c-b9c3-2de2ff32d356" path="/var/lib/kubelet/pods/85b7260d-e208-454c-b9c3-2de2ff32d356/volumes"
Dec 02 19:09:37 crc kubenswrapper[4792]: I1202 19:09:37.563259 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3c55368-83f1-4bc7-921d-44111791eb23" path="/var/lib/kubelet/pods/e3c55368-83f1-4bc7-921d-44111791eb23/volumes"
Dec 02 19:09:38 crc kubenswrapper[4792]: I1202 19:09:38.081100 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 02 19:09:38 crc kubenswrapper[4792]: I1202 19:09:38.081580 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 02 19:10:08 crc kubenswrapper[4792]: I1202 19:10:08.047803 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-jh2dc"]
Dec 02 19:10:08 crc kubenswrapper[4792]: I1202 19:10:08.057115 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-jh2dc"]
Dec 02 19:10:08 crc kubenswrapper[4792]: I1202 19:10:08.083838 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 02 19:10:08 crc kubenswrapper[4792]: I1202 19:10:08.083906 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 02 19:10:09 crc kubenswrapper[4792]: I1202 19:10:09.574325 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c750063f-56d2-47d7-9237-a7ab6a26f6a2" path="/var/lib/kubelet/pods/c750063f-56d2-47d7-9237-a7ab6a26f6a2/volumes"
Dec 02 19:10:18 crc kubenswrapper[4792]: I1202 19:10:18.054231 4792 generic.go:334] "Generic (PLEG): container finished" podID="e3237d97-ad57-4313-8210-fa48b0740a3c" containerID="11232decf9d84cceb222e94ef285620ab37f3b6cd680ffb3033ad7483582c652" exitCode=0
Dec 02 19:10:18 crc kubenswrapper[4792]: I1202 19:10:18.054293 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-sgg5b" event={"ID":"e3237d97-ad57-4313-8210-fa48b0740a3c","Type":"ContainerDied","Data":"11232decf9d84cceb222e94ef285620ab37f3b6cd680ffb3033ad7483582c652"}
Dec 02 19:10:19 crc kubenswrapper[4792]: I1202 19:10:19.624661 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-sgg5b"
Dec 02 19:10:19 crc kubenswrapper[4792]: I1202 19:10:19.724880 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e3237d97-ad57-4313-8210-fa48b0740a3c-ssh-key\") pod \"e3237d97-ad57-4313-8210-fa48b0740a3c\" (UID: \"e3237d97-ad57-4313-8210-fa48b0740a3c\") "
Dec 02 19:10:19 crc kubenswrapper[4792]: I1202 19:10:19.725317 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e3237d97-ad57-4313-8210-fa48b0740a3c-inventory\") pod \"e3237d97-ad57-4313-8210-fa48b0740a3c\" (UID: \"e3237d97-ad57-4313-8210-fa48b0740a3c\") "
Dec 02 19:10:19 crc kubenswrapper[4792]: I1202 19:10:19.725358 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pc985\" (UniqueName: \"kubernetes.io/projected/e3237d97-ad57-4313-8210-fa48b0740a3c-kube-api-access-pc985\") pod \"e3237d97-ad57-4313-8210-fa48b0740a3c\" (UID: \"e3237d97-ad57-4313-8210-fa48b0740a3c\") "
Dec 02 19:10:19 crc kubenswrapper[4792]: I1202 19:10:19.731786 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3237d97-ad57-4313-8210-fa48b0740a3c-kube-api-access-pc985" (OuterVolumeSpecName: "kube-api-access-pc985") pod "e3237d97-ad57-4313-8210-fa48b0740a3c" (UID: "e3237d97-ad57-4313-8210-fa48b0740a3c"). InnerVolumeSpecName "kube-api-access-pc985". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 19:10:19 crc kubenswrapper[4792]: I1202 19:10:19.755414 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3237d97-ad57-4313-8210-fa48b0740a3c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "e3237d97-ad57-4313-8210-fa48b0740a3c" (UID: "e3237d97-ad57-4313-8210-fa48b0740a3c"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 19:10:19 crc kubenswrapper[4792]: I1202 19:10:19.756137 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3237d97-ad57-4313-8210-fa48b0740a3c-inventory" (OuterVolumeSpecName: "inventory") pod "e3237d97-ad57-4313-8210-fa48b0740a3c" (UID: "e3237d97-ad57-4313-8210-fa48b0740a3c"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 19:10:19 crc kubenswrapper[4792]: I1202 19:10:19.829271 4792 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e3237d97-ad57-4313-8210-fa48b0740a3c-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 02 19:10:19 crc kubenswrapper[4792]: I1202 19:10:19.829303 4792 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e3237d97-ad57-4313-8210-fa48b0740a3c-inventory\") on node \"crc\" DevicePath \"\""
Dec 02 19:10:19 crc kubenswrapper[4792]: I1202 19:10:19.829314 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pc985\" (UniqueName: \"kubernetes.io/projected/e3237d97-ad57-4313-8210-fa48b0740a3c-kube-api-access-pc985\") on node \"crc\" DevicePath \"\""
Dec 02 19:10:20 crc kubenswrapper[4792]: I1202 19:10:20.083340 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-sgg5b" event={"ID":"e3237d97-ad57-4313-8210-fa48b0740a3c","Type":"ContainerDied","Data":"db7661810a3f145fed82ce37bf4db2ca31bdf06116f19d5c9b183825225042a8"}
Dec 02 19:10:20 crc kubenswrapper[4792]: I1202 19:10:20.083375 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="db7661810a3f145fed82ce37bf4db2ca31bdf06116f19d5c9b183825225042a8"
Dec 02 19:10:20 crc kubenswrapper[4792]: I1202 19:10:20.083435 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-sgg5b"
Dec 02 19:10:20 crc kubenswrapper[4792]: I1202 19:10:20.203659 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-t4l49"]
Dec 02 19:10:20 crc kubenswrapper[4792]: E1202 19:10:20.204264 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3237d97-ad57-4313-8210-fa48b0740a3c" containerName="configure-network-edpm-deployment-openstack-edpm-ipam"
Dec 02 19:10:20 crc kubenswrapper[4792]: I1202 19:10:20.204288 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3237d97-ad57-4313-8210-fa48b0740a3c" containerName="configure-network-edpm-deployment-openstack-edpm-ipam"
Dec 02 19:10:20 crc kubenswrapper[4792]: I1202 19:10:20.204539 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3237d97-ad57-4313-8210-fa48b0740a3c" containerName="configure-network-edpm-deployment-openstack-edpm-ipam"
Dec 02 19:10:20 crc kubenswrapper[4792]: I1202 19:10:20.205425 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-t4l49"
Dec 02 19:10:20 crc kubenswrapper[4792]: I1202 19:10:20.208938 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zln7c"
Dec 02 19:10:20 crc kubenswrapper[4792]: I1202 19:10:20.211499 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 02 19:10:20 crc kubenswrapper[4792]: I1202 19:10:20.211513 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 02 19:10:20 crc kubenswrapper[4792]: I1202 19:10:20.211511 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 02 19:10:20 crc kubenswrapper[4792]: I1202 19:10:20.254561 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-t4l49"]
Dec 02 19:10:20 crc kubenswrapper[4792]: I1202 19:10:20.345700 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c16326ab-5471-4840-98cc-670d5601a873-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-t4l49\" (UID: \"c16326ab-5471-4840-98cc-670d5601a873\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-t4l49"
Dec 02 19:10:20 crc kubenswrapper[4792]: I1202 19:10:20.346046 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c16326ab-5471-4840-98cc-670d5601a873-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-t4l49\" (UID: \"c16326ab-5471-4840-98cc-670d5601a873\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-t4l49"
Dec 02 19:10:20 crc kubenswrapper[4792]: I1202 19:10:20.346131 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-glmf6\" (UniqueName: \"kubernetes.io/projected/c16326ab-5471-4840-98cc-670d5601a873-kube-api-access-glmf6\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-t4l49\" (UID: \"c16326ab-5471-4840-98cc-670d5601a873\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-t4l49"
Dec 02 19:10:20 crc kubenswrapper[4792]: I1202 19:10:20.449070 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c16326ab-5471-4840-98cc-670d5601a873-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-t4l49\" (UID: \"c16326ab-5471-4840-98cc-670d5601a873\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-t4l49"
Dec 02 19:10:20 crc kubenswrapper[4792]: I1202 19:10:20.449448 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c16326ab-5471-4840-98cc-670d5601a873-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-t4l49\" (UID: \"c16326ab-5471-4840-98cc-670d5601a873\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-t4l49"
Dec 02 19:10:20 crc kubenswrapper[4792]: I1202 19:10:20.449633 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-glmf6\" (UniqueName: \"kubernetes.io/projected/c16326ab-5471-4840-98cc-670d5601a873-kube-api-access-glmf6\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-t4l49\" (UID: \"c16326ab-5471-4840-98cc-670d5601a873\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-t4l49"
Dec 02 19:10:20 crc kubenswrapper[4792]: I1202 19:10:20.455125 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c16326ab-5471-4840-98cc-670d5601a873-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-t4l49\" (UID: \"c16326ab-5471-4840-98cc-670d5601a873\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-t4l49"
Dec 02 19:10:20 crc kubenswrapper[4792]: I1202 19:10:20.459474 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c16326ab-5471-4840-98cc-670d5601a873-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-t4l49\" (UID: \"c16326ab-5471-4840-98cc-670d5601a873\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-t4l49"
Dec 02 19:10:20 crc kubenswrapper[4792]: I1202 19:10:20.467628 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-glmf6\" (UniqueName: \"kubernetes.io/projected/c16326ab-5471-4840-98cc-670d5601a873-kube-api-access-glmf6\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-t4l49\" (UID: \"c16326ab-5471-4840-98cc-670d5601a873\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-t4l49"
Dec 02 19:10:20 crc kubenswrapper[4792]: I1202 19:10:20.525040 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-t4l49"
Dec 02 19:10:21 crc kubenswrapper[4792]: I1202 19:10:21.199186 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-t4l49"]
Dec 02 19:10:21 crc kubenswrapper[4792]: W1202 19:10:21.200090 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc16326ab_5471_4840_98cc_670d5601a873.slice/crio-c8c8ab8fc9783e5fd42555c25d36547b0390810048553cc7ca113c3b5da8994e WatchSource:0}: Error finding container c8c8ab8fc9783e5fd42555c25d36547b0390810048553cc7ca113c3b5da8994e: Status 404 returned error can't find the container with id c8c8ab8fc9783e5fd42555c25d36547b0390810048553cc7ca113c3b5da8994e
Dec 02 19:10:22 crc kubenswrapper[4792]: I1202 19:10:22.135576 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-t4l49" event={"ID":"c16326ab-5471-4840-98cc-670d5601a873","Type":"ContainerStarted","Data":"f3f89c4682f47665839a551186370d0eb73d1f9e9e69ce9f4a3fac2fc8ef8433"}
Dec 02 19:10:22 crc kubenswrapper[4792]: I1202 19:10:22.136164 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-t4l49" event={"ID":"c16326ab-5471-4840-98cc-670d5601a873","Type":"ContainerStarted","Data":"c8c8ab8fc9783e5fd42555c25d36547b0390810048553cc7ca113c3b5da8994e"}
Dec 02 19:10:22 crc kubenswrapper[4792]: I1202 19:10:22.156474 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-t4l49" podStartSLOduration=1.59148172 podStartE2EDuration="2.156451059s" podCreationTimestamp="2025-12-02 19:10:20 +0000 UTC" firstStartedPulling="2025-12-02 19:10:21.203065193 +0000 UTC m=+2051.975957541" lastFinishedPulling="2025-12-02 19:10:21.768034542 +0000 UTC m=+2052.540926880" observedRunningTime="2025-12-02 19:10:22.153868342 +0000 UTC m=+2052.926760700" watchObservedRunningTime="2025-12-02 19:10:22.156451059 +0000 UTC m=+2052.929343407"
Dec 02 19:10:27 crc kubenswrapper[4792]: I1202 19:10:27.196223 4792 generic.go:334] "Generic (PLEG): container finished" podID="c16326ab-5471-4840-98cc-670d5601a873" containerID="f3f89c4682f47665839a551186370d0eb73d1f9e9e69ce9f4a3fac2fc8ef8433" exitCode=0
Dec 02 19:10:27 crc kubenswrapper[4792]: I1202 19:10:27.196444 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-t4l49" event={"ID":"c16326ab-5471-4840-98cc-670d5601a873","Type":"ContainerDied","Data":"f3f89c4682f47665839a551186370d0eb73d1f9e9e69ce9f4a3fac2fc8ef8433"}
Dec 02 19:10:28 crc kubenswrapper[4792]: I1202 19:10:28.789531 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-t4l49"
Dec 02 19:10:28 crc kubenswrapper[4792]: I1202 19:10:28.964653 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c16326ab-5471-4840-98cc-670d5601a873-ssh-key\") pod \"c16326ab-5471-4840-98cc-670d5601a873\" (UID: \"c16326ab-5471-4840-98cc-670d5601a873\") "
Dec 02 19:10:28 crc kubenswrapper[4792]: I1202 19:10:28.964737 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-glmf6\" (UniqueName: \"kubernetes.io/projected/c16326ab-5471-4840-98cc-670d5601a873-kube-api-access-glmf6\") pod \"c16326ab-5471-4840-98cc-670d5601a873\" (UID: \"c16326ab-5471-4840-98cc-670d5601a873\") "
Dec 02 19:10:28 crc kubenswrapper[4792]: I1202 19:10:28.964917 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c16326ab-5471-4840-98cc-670d5601a873-inventory\") pod \"c16326ab-5471-4840-98cc-670d5601a873\" (UID: \"c16326ab-5471-4840-98cc-670d5601a873\") "
Dec 02 19:10:28 crc kubenswrapper[4792]: I1202 19:10:28.970300 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c16326ab-5471-4840-98cc-670d5601a873-kube-api-access-glmf6" (OuterVolumeSpecName: "kube-api-access-glmf6") pod "c16326ab-5471-4840-98cc-670d5601a873" (UID: "c16326ab-5471-4840-98cc-670d5601a873"). InnerVolumeSpecName "kube-api-access-glmf6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 19:10:29 crc kubenswrapper[4792]: I1202 19:10:29.007223 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c16326ab-5471-4840-98cc-670d5601a873-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c16326ab-5471-4840-98cc-670d5601a873" (UID: "c16326ab-5471-4840-98cc-670d5601a873"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 19:10:29 crc kubenswrapper[4792]: I1202 19:10:29.009136 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c16326ab-5471-4840-98cc-670d5601a873-inventory" (OuterVolumeSpecName: "inventory") pod "c16326ab-5471-4840-98cc-670d5601a873" (UID: "c16326ab-5471-4840-98cc-670d5601a873"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 19:10:29 crc kubenswrapper[4792]: I1202 19:10:29.067565 4792 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c16326ab-5471-4840-98cc-670d5601a873-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 02 19:10:29 crc kubenswrapper[4792]: I1202 19:10:29.067595 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-glmf6\" (UniqueName: \"kubernetes.io/projected/c16326ab-5471-4840-98cc-670d5601a873-kube-api-access-glmf6\") on node \"crc\" DevicePath \"\""
Dec 02 19:10:29 crc kubenswrapper[4792]: I1202 19:10:29.067624 4792 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c16326ab-5471-4840-98cc-670d5601a873-inventory\") on node \"crc\" DevicePath \"\""
Dec 02 19:10:29 crc kubenswrapper[4792]: I1202 19:10:29.224444 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-t4l49" event={"ID":"c16326ab-5471-4840-98cc-670d5601a873","Type":"ContainerDied","Data":"c8c8ab8fc9783e5fd42555c25d36547b0390810048553cc7ca113c3b5da8994e"}
Dec 02 19:10:29 crc kubenswrapper[4792]: I1202 19:10:29.224828 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c8c8ab8fc9783e5fd42555c25d36547b0390810048553cc7ca113c3b5da8994e"
Dec 02 19:10:29 crc kubenswrapper[4792]: I1202 19:10:29.224493 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-t4l49"
Dec 02 19:10:29 crc kubenswrapper[4792]: I1202 19:10:29.305169 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-s9m9g"]
Dec 02 19:10:29 crc kubenswrapper[4792]: E1202 19:10:29.305560 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c16326ab-5471-4840-98cc-670d5601a873" containerName="validate-network-edpm-deployment-openstack-edpm-ipam"
Dec 02 19:10:29 crc kubenswrapper[4792]: I1202 19:10:29.305578 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="c16326ab-5471-4840-98cc-670d5601a873" containerName="validate-network-edpm-deployment-openstack-edpm-ipam"
Dec 02 19:10:29 crc kubenswrapper[4792]: I1202 19:10:29.305815 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="c16326ab-5471-4840-98cc-670d5601a873" containerName="validate-network-edpm-deployment-openstack-edpm-ipam"
Dec 02 19:10:29 crc kubenswrapper[4792]: I1202 19:10:29.306602 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s9m9g"
Dec 02 19:10:29 crc kubenswrapper[4792]: I1202 19:10:29.312073 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 02 19:10:29 crc kubenswrapper[4792]: I1202 19:10:29.312377 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 02 19:10:29 crc kubenswrapper[4792]: I1202 19:10:29.312408 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zln7c"
Dec 02 19:10:29 crc kubenswrapper[4792]: I1202 19:10:29.317080 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 02 19:10:29 crc kubenswrapper[4792]: I1202 19:10:29.323768 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-s9m9g"]
Dec 02 19:10:29 crc kubenswrapper[4792]: I1202 19:10:29.475618 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/927db467-4fe0-45db-bdfa-9f8de3f72259-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-s9m9g\" (UID: \"927db467-4fe0-45db-bdfa-9f8de3f72259\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s9m9g"
Dec 02 19:10:29 crc kubenswrapper[4792]: I1202 19:10:29.475783 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w4bn8\" (UniqueName: \"kubernetes.io/projected/927db467-4fe0-45db-bdfa-9f8de3f72259-kube-api-access-w4bn8\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-s9m9g\" (UID: \"927db467-4fe0-45db-bdfa-9f8de3f72259\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s9m9g"
Dec 02 19:10:29 crc kubenswrapper[4792]: I1202 19:10:29.476240 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/927db467-4fe0-45db-bdfa-9f8de3f72259-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-s9m9g\" (UID: \"927db467-4fe0-45db-bdfa-9f8de3f72259\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s9m9g"
Dec 02 19:10:29 crc kubenswrapper[4792]: I1202 19:10:29.579471 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w4bn8\" (UniqueName: \"kubernetes.io/projected/927db467-4fe0-45db-bdfa-9f8de3f72259-kube-api-access-w4bn8\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-s9m9g\" (UID: \"927db467-4fe0-45db-bdfa-9f8de3f72259\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s9m9g"
Dec 02 19:10:29 crc kubenswrapper[4792]: I1202 19:10:29.579759 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/927db467-4fe0-45db-bdfa-9f8de3f72259-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-s9m9g\" (UID: \"927db467-4fe0-45db-bdfa-9f8de3f72259\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s9m9g"
Dec 02 19:10:29 crc kubenswrapper[4792]: I1202 19:10:29.579879 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/927db467-4fe0-45db-bdfa-9f8de3f72259-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-s9m9g\" (UID: \"927db467-4fe0-45db-bdfa-9f8de3f72259\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s9m9g"
Dec 02 19:10:29 crc kubenswrapper[4792]: I1202 19:10:29.585854 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/927db467-4fe0-45db-bdfa-9f8de3f72259-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-s9m9g\" (UID: \"927db467-4fe0-45db-bdfa-9f8de3f72259\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s9m9g"
Dec 02 19:10:29 crc kubenswrapper[4792]: I1202 19:10:29.587324 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/927db467-4fe0-45db-bdfa-9f8de3f72259-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-s9m9g\" (UID: \"927db467-4fe0-45db-bdfa-9f8de3f72259\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s9m9g"
Dec 02 19:10:29 crc kubenswrapper[4792]: I1202 19:10:29.614221 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w4bn8\" (UniqueName: \"kubernetes.io/projected/927db467-4fe0-45db-bdfa-9f8de3f72259-kube-api-access-w4bn8\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-s9m9g\" (UID: \"927db467-4fe0-45db-bdfa-9f8de3f72259\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s9m9g"
Dec 02 19:10:29 crc kubenswrapper[4792]: I1202 19:10:29.636399 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s9m9g"
Dec 02 19:10:30 crc kubenswrapper[4792]: I1202 19:10:30.220857 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-s9m9g"]
Dec 02 19:10:30 crc kubenswrapper[4792]: W1202 19:10:30.223598 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod927db467_4fe0_45db_bdfa_9f8de3f72259.slice/crio-fcef0b1ee7a5d4e0752c5d0b5ce8d363bb1f4dbb195bd6d667cef444e58cce22 WatchSource:0}: Error finding container fcef0b1ee7a5d4e0752c5d0b5ce8d363bb1f4dbb195bd6d667cef444e58cce22: Status 404 returned error can't find the container with id fcef0b1ee7a5d4e0752c5d0b5ce8d363bb1f4dbb195bd6d667cef444e58cce22
Dec 02 19:10:30 crc kubenswrapper[4792]: I1202 19:10:30.240460 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s9m9g" event={"ID":"927db467-4fe0-45db-bdfa-9f8de3f72259","Type":"ContainerStarted","Data":"fcef0b1ee7a5d4e0752c5d0b5ce8d363bb1f4dbb195bd6d667cef444e58cce22"}
Dec 02 19:10:31 crc kubenswrapper[4792]: I1202 19:10:31.047214 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-428v5"]
Dec 02 19:10:31 crc kubenswrapper[4792]: I1202 19:10:31.061647 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-428v5"]
Dec 02 19:10:31 crc kubenswrapper[4792]: I1202 19:10:31.259822 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s9m9g" event={"ID":"927db467-4fe0-45db-bdfa-9f8de3f72259","Type":"ContainerStarted","Data":"94d61507117aa743b4cf4b914402f5683063039b6a006aec069622140158479e"}
Dec 02 19:10:31 crc kubenswrapper[4792]: I1202 19:10:31.278735 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s9m9g" podStartSLOduration=1.884971054 podStartE2EDuration="2.278717101s" podCreationTimestamp="2025-12-02 19:10:29 +0000 UTC" firstStartedPulling="2025-12-02 19:10:30.227097528 +0000 UTC m=+2060.999989896" lastFinishedPulling="2025-12-02 19:10:30.620843585 +0000 UTC m=+2061.393735943" observedRunningTime="2025-12-02 19:10:31.27562666 +0000 UTC m=+2062.048518988" watchObservedRunningTime="2025-12-02 19:10:31.278717101 +0000 UTC m=+2062.051609429"
Dec 02 19:10:31 crc kubenswrapper[4792]: I1202 19:10:31.558121 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="82858e4f-bc42-4839-8d38-9f7f1772a089" path="/var/lib/kubelet/pods/82858e4f-bc42-4839-8d38-9f7f1772a089/volumes"
Dec 02 19:10:36 crc kubenswrapper[4792]: I1202 19:10:36.040595 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-glqwc"]
Dec 02 19:10:36 crc kubenswrapper[4792]: I1202 19:10:36.081613 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-glqwc"]
Dec 02 19:10:36 crc kubenswrapper[4792]: I1202 19:10:36.834597 4792 scope.go:117] "RemoveContainer" containerID="96c8a20700cfe76e4fea8b6e0ea62097ccb2d41a1004ddb04a056d9f913ee1a7"
Dec 02 19:10:36 crc kubenswrapper[4792]: I1202 19:10:36.876377 4792 scope.go:117] "RemoveContainer" containerID="059dea24a93777e8673f82fa81f07ee95814961213e6b30f17bbc62c0cad0ed3"
Dec 02 19:10:36 crc kubenswrapper[4792]: I1202 19:10:36.980001 4792 scope.go:117] "RemoveContainer" containerID="613ff8dfa3ac486cbef5560a93d5b8e8fbc70b7ef735f6c4f86b5be131b12ded"
Dec 02 19:10:37 crc kubenswrapper[4792]: I1202 19:10:37.060906 4792 scope.go:117] "RemoveContainer" containerID="96a66e081d2b958701ad9e4cf60c5b3a4b3623f12c93def8e8e345258766272f"
Dec 02 19:10:37 crc kubenswrapper[4792]: I1202 19:10:37.089099 4792 scope.go:117] "RemoveContainer" containerID="91d316db9ad2573185af2ae8cdfc729352152d73d593db5d1cfcd17bfa3d669a"
Dec 02 19:10:37 crc kubenswrapper[4792]: I1202 19:10:37.130708 4792 scope.go:117] "RemoveContainer" containerID="d535be81f937d6a08c822cc0ef5cceb6647f45b1d2b8a0f903453fda1bcc3b5e"
Dec 02 19:10:37 crc kubenswrapper[4792]: I1202 19:10:37.178564 4792 scope.go:117] "RemoveContainer" containerID="0dc8f4ae80975827647fd7ba656d100c9c201b93d36d62e17511fbed9a2ebae4"
Dec 02 19:10:37 crc kubenswrapper[4792]: I1202 19:10:37.565816 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="67c9d25a-3961-4b7d-bad6-340311a55dde" path="/var/lib/kubelet/pods/67c9d25a-3961-4b7d-bad6-340311a55dde/volumes"
Dec 02 19:10:38 crc kubenswrapper[4792]: I1202 19:10:38.081720 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 02 19:10:38 crc kubenswrapper[4792]: I1202 19:10:38.082842 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 02 19:10:38 crc kubenswrapper[4792]: I1202 19:10:38.083079 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4"
Dec 02 19:10:38 crc kubenswrapper[4792]: I1202 19:10:38.084376 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"11788464675fa1d7887d1f27536456407985fbf82e43eb7551c12897d3a408e0"} pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 02 19:10:38 crc kubenswrapper[4792]: I1202 19:10:38.084645 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" containerID="cri-o://11788464675fa1d7887d1f27536456407985fbf82e43eb7551c12897d3a408e0" gracePeriod=600
Dec 02 19:10:38 crc kubenswrapper[4792]: I1202 19:10:38.371006 4792 generic.go:334] "Generic (PLEG): container finished" podID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerID="11788464675fa1d7887d1f27536456407985fbf82e43eb7551c12897d3a408e0" exitCode=0
Dec 02 19:10:38 crc kubenswrapper[4792]: I1202 19:10:38.371046 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" event={"ID":"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f","Type":"ContainerDied","Data":"11788464675fa1d7887d1f27536456407985fbf82e43eb7551c12897d3a408e0"}
Dec 02 19:10:38 crc kubenswrapper[4792]: I1202 19:10:38.371131 4792 scope.go:117] "RemoveContainer" containerID="1bd3680d8cabbe09313bc8e9bfcfc7ae4c8aef120bcf8851a5536ed5b3144523"
Dec 02 19:10:39 crc kubenswrapper[4792]: I1202 19:10:39.389044 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" event={"ID":"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f","Type":"ContainerStarted","Data":"3c67a741cfc6dd9fe0d54fd77123388dbf880be9adb881936a491df61309de1e"}
Dec 02 19:10:55 crc kubenswrapper[4792]: I1202 19:10:55.120162 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/swift-proxy-79bc665747-kkc2q" podUID="e3263958-3718-4ceb-8751-6fa73a1a60f5" containerName="proxy-server" probeResult="failure" output="HTTP probe failed with statuscode: 502"
Dec 02 19:11:14 crc kubenswrapper[4792]: I1202 19:11:14.817380 4792 generic.go:334] "Generic (PLEG): container finished" podID="927db467-4fe0-45db-bdfa-9f8de3f72259" containerID="94d61507117aa743b4cf4b914402f5683063039b6a006aec069622140158479e" exitCode=0
Dec 02 19:11:14 crc kubenswrapper[4792]: I1202 19:11:14.817467 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s9m9g" event={"ID":"927db467-4fe0-45db-bdfa-9f8de3f72259","Type":"ContainerDied","Data":"94d61507117aa743b4cf4b914402f5683063039b6a006aec069622140158479e"}
Dec 02 19:11:16 crc kubenswrapper[4792]: I1202 19:11:16.047586 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-6nrq2"]
Dec 02 19:11:16 crc kubenswrapper[4792]: I1202 19:11:16.055269 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-6nrq2"]
Dec 02 19:11:16 crc kubenswrapper[4792]: I1202 19:11:16.415731 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s9m9g"
Dec 02 19:11:16 crc kubenswrapper[4792]: I1202 19:11:16.509081 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4bn8\" (UniqueName: \"kubernetes.io/projected/927db467-4fe0-45db-bdfa-9f8de3f72259-kube-api-access-w4bn8\") pod \"927db467-4fe0-45db-bdfa-9f8de3f72259\" (UID: \"927db467-4fe0-45db-bdfa-9f8de3f72259\") "
Dec 02 19:11:16 crc kubenswrapper[4792]: I1202 19:11:16.509204 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/927db467-4fe0-45db-bdfa-9f8de3f72259-ssh-key\") pod \"927db467-4fe0-45db-bdfa-9f8de3f72259\" (UID: \"927db467-4fe0-45db-bdfa-9f8de3f72259\") "
Dec 02 19:11:16 crc kubenswrapper[4792]: I1202 19:11:16.509247 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/927db467-4fe0-45db-bdfa-9f8de3f72259-inventory\") pod \"927db467-4fe0-45db-bdfa-9f8de3f72259\" (UID: \"927db467-4fe0-45db-bdfa-9f8de3f72259\") "
Dec 02 19:11:16 crc kubenswrapper[4792]: I1202 19:11:16.515053 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/927db467-4fe0-45db-bdfa-9f8de3f72259-kube-api-access-w4bn8" (OuterVolumeSpecName: "kube-api-access-w4bn8") pod "927db467-4fe0-45db-bdfa-9f8de3f72259" (UID: "927db467-4fe0-45db-bdfa-9f8de3f72259"). InnerVolumeSpecName "kube-api-access-w4bn8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 19:11:16 crc kubenswrapper[4792]: I1202 19:11:16.545652 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/927db467-4fe0-45db-bdfa-9f8de3f72259-inventory" (OuterVolumeSpecName: "inventory") pod "927db467-4fe0-45db-bdfa-9f8de3f72259" (UID: "927db467-4fe0-45db-bdfa-9f8de3f72259"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 19:11:16 crc kubenswrapper[4792]: I1202 19:11:16.554705 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/927db467-4fe0-45db-bdfa-9f8de3f72259-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "927db467-4fe0-45db-bdfa-9f8de3f72259" (UID: "927db467-4fe0-45db-bdfa-9f8de3f72259"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 19:11:16 crc kubenswrapper[4792]: I1202 19:11:16.611609 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4bn8\" (UniqueName: \"kubernetes.io/projected/927db467-4fe0-45db-bdfa-9f8de3f72259-kube-api-access-w4bn8\") on node \"crc\" DevicePath \"\""
Dec 02 19:11:16 crc kubenswrapper[4792]: I1202 19:11:16.611638 4792 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/927db467-4fe0-45db-bdfa-9f8de3f72259-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 02 19:11:16 crc kubenswrapper[4792]: I1202 19:11:16.611648 4792 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/927db467-4fe0-45db-bdfa-9f8de3f72259-inventory\") on node \"crc\" DevicePath \"\""
Dec 02 19:11:16 crc kubenswrapper[4792]: I1202 19:11:16.844565 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s9m9g" event={"ID":"927db467-4fe0-45db-bdfa-9f8de3f72259","Type":"ContainerDied","Data":"fcef0b1ee7a5d4e0752c5d0b5ce8d363bb1f4dbb195bd6d667cef444e58cce22"}
Dec 02 19:11:16 crc kubenswrapper[4792]: I1202 19:11:16.844627 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s9m9g"
Dec 02 19:11:16 crc kubenswrapper[4792]: I1202 19:11:16.844630 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fcef0b1ee7a5d4e0752c5d0b5ce8d363bb1f4dbb195bd6d667cef444e58cce22"
Dec 02 19:11:16 crc kubenswrapper[4792]: I1202 19:11:16.963649 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z8695"]
Dec 02 19:11:16 crc kubenswrapper[4792]: E1202 19:11:16.964645 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="927db467-4fe0-45db-bdfa-9f8de3f72259" containerName="install-os-edpm-deployment-openstack-edpm-ipam"
Dec 02 19:11:16 crc kubenswrapper[4792]: I1202 19:11:16.964685 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="927db467-4fe0-45db-bdfa-9f8de3f72259" containerName="install-os-edpm-deployment-openstack-edpm-ipam"
Dec 02 19:11:16 crc kubenswrapper[4792]: I1202 19:11:16.965075 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="927db467-4fe0-45db-bdfa-9f8de3f72259" containerName="install-os-edpm-deployment-openstack-edpm-ipam"
Dec 02 19:11:16 crc kubenswrapper[4792]: I1202 19:11:16.966388 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z8695"
Dec 02 19:11:16 crc kubenswrapper[4792]: I1202 19:11:16.968756 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zln7c"
Dec 02 19:11:16 crc kubenswrapper[4792]: I1202 19:11:16.968808 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 02 19:11:16 crc kubenswrapper[4792]: I1202 19:11:16.968986 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 02 19:11:16 crc kubenswrapper[4792]: I1202 19:11:16.969457 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 02 19:11:16 crc kubenswrapper[4792]: I1202 19:11:16.980806 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z8695"]
Dec 02 19:11:17 crc kubenswrapper[4792]: I1202 19:11:17.124109 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7j2fz\" (UniqueName: \"kubernetes.io/projected/81a6ef07-63e3-4982-896f-c40102622a62-kube-api-access-7j2fz\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-z8695\" (UID: \"81a6ef07-63e3-4982-896f-c40102622a62\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z8695"
Dec 02 19:11:17 crc kubenswrapper[4792]: I1202 19:11:17.124329 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/81a6ef07-63e3-4982-896f-c40102622a62-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-z8695\" (UID: \"81a6ef07-63e3-4982-896f-c40102622a62\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z8695"
Dec 02 19:11:17 crc kubenswrapper[4792]: I1202 19:11:17.124433 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/81a6ef07-63e3-4982-896f-c40102622a62-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-z8695\" (UID: \"81a6ef07-63e3-4982-896f-c40102622a62\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z8695"
Dec 02 19:11:17 crc kubenswrapper[4792]: I1202 19:11:17.227169 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7j2fz\" (UniqueName: \"kubernetes.io/projected/81a6ef07-63e3-4982-896f-c40102622a62-kube-api-access-7j2fz\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-z8695\" (UID: \"81a6ef07-63e3-4982-896f-c40102622a62\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z8695"
Dec 02 19:11:17 crc kubenswrapper[4792]: I1202 19:11:17.227649 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/81a6ef07-63e3-4982-896f-c40102622a62-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-z8695\" (UID: \"81a6ef07-63e3-4982-896f-c40102622a62\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z8695"
Dec 02 19:11:17 crc kubenswrapper[4792]: I1202 19:11:17.227701 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/81a6ef07-63e3-4982-896f-c40102622a62-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-z8695\"
(UID: \"81a6ef07-63e3-4982-896f-c40102622a62\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z8695" Dec 02 19:11:17 crc kubenswrapper[4792]: I1202 19:11:17.232534 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/81a6ef07-63e3-4982-896f-c40102622a62-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-z8695\" (UID: \"81a6ef07-63e3-4982-896f-c40102622a62\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z8695" Dec 02 19:11:17 crc kubenswrapper[4792]: I1202 19:11:17.235949 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/81a6ef07-63e3-4982-896f-c40102622a62-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-z8695\" (UID: \"81a6ef07-63e3-4982-896f-c40102622a62\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z8695" Dec 02 19:11:17 crc kubenswrapper[4792]: I1202 19:11:17.264129 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7j2fz\" (UniqueName: \"kubernetes.io/projected/81a6ef07-63e3-4982-896f-c40102622a62-kube-api-access-7j2fz\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-z8695\" (UID: \"81a6ef07-63e3-4982-896f-c40102622a62\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z8695" Dec 02 19:11:17 crc kubenswrapper[4792]: I1202 19:11:17.290206 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z8695" Dec 02 19:11:17 crc kubenswrapper[4792]: I1202 19:11:17.551775 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914" path="/var/lib/kubelet/pods/25cab2e0-7af1-4f0e-a7b2-7d5f6b2be914/volumes" Dec 02 19:11:18 crc kubenswrapper[4792]: I1202 19:11:18.168153 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z8695"] Dec 02 19:11:18 crc kubenswrapper[4792]: I1202 19:11:18.875694 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z8695" event={"ID":"81a6ef07-63e3-4982-896f-c40102622a62","Type":"ContainerStarted","Data":"565a8eb21e780462c66b0baa8d252f47fe024674920b012353722f7fc4fba97e"} Dec 02 19:11:19 crc kubenswrapper[4792]: I1202 19:11:19.888743 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z8695" event={"ID":"81a6ef07-63e3-4982-896f-c40102622a62","Type":"ContainerStarted","Data":"2aad1fd56212ace3e8556fd9a6ebe4c5ad4e99c1b329570ca1e01321754924e9"} Dec 02 19:11:19 crc kubenswrapper[4792]: I1202 19:11:19.920788 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z8695" podStartSLOduration=3.250675337 podStartE2EDuration="3.920763633s" podCreationTimestamp="2025-12-02 19:11:16 +0000 UTC" firstStartedPulling="2025-12-02 19:11:18.174106623 +0000 UTC m=+2108.946998951" lastFinishedPulling="2025-12-02 19:11:18.844194919 +0000 UTC m=+2109.617087247" observedRunningTime="2025-12-02 19:11:19.906680055 +0000 UTC m=+2110.679572413" watchObservedRunningTime="2025-12-02 19:11:19.920763633 +0000 UTC m=+2110.693655961" Dec 02 19:11:37 crc kubenswrapper[4792]: I1202 19:11:37.392783 4792 scope.go:117] "RemoveContainer" 
containerID="14faba9f1d40d34af783d176eeccdc6188d9f4eae94ea9eebb44eb873cfae59d" Dec 02 19:11:37 crc kubenswrapper[4792]: I1202 19:11:37.461344 4792 scope.go:117] "RemoveContainer" containerID="9dd237969616cdf7de0455b8afae5553788744c495490aaf1e021952c331f415" Dec 02 19:12:19 crc kubenswrapper[4792]: I1202 19:12:19.623972 4792 generic.go:334] "Generic (PLEG): container finished" podID="81a6ef07-63e3-4982-896f-c40102622a62" containerID="2aad1fd56212ace3e8556fd9a6ebe4c5ad4e99c1b329570ca1e01321754924e9" exitCode=0 Dec 02 19:12:19 crc kubenswrapper[4792]: I1202 19:12:19.624022 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z8695" event={"ID":"81a6ef07-63e3-4982-896f-c40102622a62","Type":"ContainerDied","Data":"2aad1fd56212ace3e8556fd9a6ebe4c5ad4e99c1b329570ca1e01321754924e9"} Dec 02 19:12:21 crc kubenswrapper[4792]: I1202 19:12:21.231323 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z8695" Dec 02 19:12:21 crc kubenswrapper[4792]: I1202 19:12:21.362568 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/81a6ef07-63e3-4982-896f-c40102622a62-ssh-key\") pod \"81a6ef07-63e3-4982-896f-c40102622a62\" (UID: \"81a6ef07-63e3-4982-896f-c40102622a62\") " Dec 02 19:12:21 crc kubenswrapper[4792]: I1202 19:12:21.362614 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/81a6ef07-63e3-4982-896f-c40102622a62-inventory\") pod \"81a6ef07-63e3-4982-896f-c40102622a62\" (UID: \"81a6ef07-63e3-4982-896f-c40102622a62\") " Dec 02 19:12:21 crc kubenswrapper[4792]: I1202 19:12:21.362809 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7j2fz\" (UniqueName: \"kubernetes.io/projected/81a6ef07-63e3-4982-896f-c40102622a62-kube-api-access-7j2fz\") pod \"81a6ef07-63e3-4982-896f-c40102622a62\" (UID: \"81a6ef07-63e3-4982-896f-c40102622a62\") " Dec 02 19:12:21 crc kubenswrapper[4792]: I1202 19:12:21.375491 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81a6ef07-63e3-4982-896f-c40102622a62-kube-api-access-7j2fz" (OuterVolumeSpecName: "kube-api-access-7j2fz") pod "81a6ef07-63e3-4982-896f-c40102622a62" (UID: "81a6ef07-63e3-4982-896f-c40102622a62"). InnerVolumeSpecName "kube-api-access-7j2fz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:12:21 crc kubenswrapper[4792]: I1202 19:12:21.391643 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81a6ef07-63e3-4982-896f-c40102622a62-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "81a6ef07-63e3-4982-896f-c40102622a62" (UID: "81a6ef07-63e3-4982-896f-c40102622a62"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:12:21 crc kubenswrapper[4792]: I1202 19:12:21.392750 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81a6ef07-63e3-4982-896f-c40102622a62-inventory" (OuterVolumeSpecName: "inventory") pod "81a6ef07-63e3-4982-896f-c40102622a62" (UID: "81a6ef07-63e3-4982-896f-c40102622a62"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:12:21 crc kubenswrapper[4792]: I1202 19:12:21.464982 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7j2fz\" (UniqueName: \"kubernetes.io/projected/81a6ef07-63e3-4982-896f-c40102622a62-kube-api-access-7j2fz\") on node \"crc\" DevicePath \"\"" Dec 02 19:12:21 crc kubenswrapper[4792]: I1202 19:12:21.465015 4792 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/81a6ef07-63e3-4982-896f-c40102622a62-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 19:12:21 crc kubenswrapper[4792]: I1202 19:12:21.465024 4792 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/81a6ef07-63e3-4982-896f-c40102622a62-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 19:12:21 crc kubenswrapper[4792]: I1202 19:12:21.648088 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z8695" event={"ID":"81a6ef07-63e3-4982-896f-c40102622a62","Type":"ContainerDied","Data":"565a8eb21e780462c66b0baa8d252f47fe024674920b012353722f7fc4fba97e"} Dec 02 19:12:21 crc kubenswrapper[4792]: I1202 19:12:21.648416 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="565a8eb21e780462c66b0baa8d252f47fe024674920b012353722f7fc4fba97e" Dec 02 19:12:21 crc kubenswrapper[4792]: I1202 19:12:21.648164 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z8695" Dec 02 19:12:21 crc kubenswrapper[4792]: I1202 19:12:21.743384 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-xdsm4"] Dec 02 19:12:21 crc kubenswrapper[4792]: E1202 19:12:21.744228 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81a6ef07-63e3-4982-896f-c40102622a62" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 02 19:12:21 crc kubenswrapper[4792]: I1202 19:12:21.744251 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="81a6ef07-63e3-4982-896f-c40102622a62" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 02 19:12:21 crc kubenswrapper[4792]: I1202 19:12:21.744478 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="81a6ef07-63e3-4982-896f-c40102622a62" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 02 19:12:21 crc kubenswrapper[4792]: I1202 19:12:21.745237 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-xdsm4" Dec 02 19:12:21 crc kubenswrapper[4792]: I1202 19:12:21.753908 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-xdsm4"] Dec 02 19:12:21 crc kubenswrapper[4792]: I1202 19:12:21.787488 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-khggn\" (UniqueName: \"kubernetes.io/projected/e957a792-604a-439d-8fa3-271edf600cac-kube-api-access-khggn\") pod \"ssh-known-hosts-edpm-deployment-xdsm4\" (UID: \"e957a792-604a-439d-8fa3-271edf600cac\") " pod="openstack/ssh-known-hosts-edpm-deployment-xdsm4" Dec 02 19:12:21 crc kubenswrapper[4792]: I1202 19:12:21.787648 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/e957a792-604a-439d-8fa3-271edf600cac-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-xdsm4\" (UID: \"e957a792-604a-439d-8fa3-271edf600cac\") " pod="openstack/ssh-known-hosts-edpm-deployment-xdsm4" Dec 02 19:12:21 crc kubenswrapper[4792]: I1202 19:12:21.787813 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/e957a792-604a-439d-8fa3-271edf600cac-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-xdsm4\" (UID: \"e957a792-604a-439d-8fa3-271edf600cac\") " pod="openstack/ssh-known-hosts-edpm-deployment-xdsm4" Dec 02 19:12:21 crc kubenswrapper[4792]: I1202 19:12:21.788308 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 19:12:21 crc kubenswrapper[4792]: I1202 19:12:21.788597 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 19:12:21 crc kubenswrapper[4792]: I1202 19:12:21.788804 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 19:12:21 crc kubenswrapper[4792]: I1202 19:12:21.788805 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zln7c" Dec 02 19:12:21 crc kubenswrapper[4792]: I1202 19:12:21.889728 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/e957a792-604a-439d-8fa3-271edf600cac-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-xdsm4\" (UID: \"e957a792-604a-439d-8fa3-271edf600cac\") " pod="openstack/ssh-known-hosts-edpm-deployment-xdsm4" Dec 02 19:12:21 crc kubenswrapper[4792]: I1202 19:12:21.889976 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-khggn\" (UniqueName: \"kubernetes.io/projected/e957a792-604a-439d-8fa3-271edf600cac-kube-api-access-khggn\") pod \"ssh-known-hosts-edpm-deployment-xdsm4\" (UID: \"e957a792-604a-439d-8fa3-271edf600cac\") " pod="openstack/ssh-known-hosts-edpm-deployment-xdsm4" Dec 02 19:12:21 crc kubenswrapper[4792]: I1202 19:12:21.890134 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/e957a792-604a-439d-8fa3-271edf600cac-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-xdsm4\" (UID: \"e957a792-604a-439d-8fa3-271edf600cac\") " pod="openstack/ssh-known-hosts-edpm-deployment-xdsm4" Dec 02 19:12:21 crc 
kubenswrapper[4792]: I1202 19:12:21.895934 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/e957a792-604a-439d-8fa3-271edf600cac-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-xdsm4\" (UID: \"e957a792-604a-439d-8fa3-271edf600cac\") " pod="openstack/ssh-known-hosts-edpm-deployment-xdsm4" Dec 02 19:12:21 crc kubenswrapper[4792]: I1202 19:12:21.902732 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/e957a792-604a-439d-8fa3-271edf600cac-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-xdsm4\" (UID: \"e957a792-604a-439d-8fa3-271edf600cac\") " pod="openstack/ssh-known-hosts-edpm-deployment-xdsm4" Dec 02 19:12:21 crc kubenswrapper[4792]: I1202 19:12:21.909915 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-khggn\" (UniqueName: \"kubernetes.io/projected/e957a792-604a-439d-8fa3-271edf600cac-kube-api-access-khggn\") pod \"ssh-known-hosts-edpm-deployment-xdsm4\" (UID: \"e957a792-604a-439d-8fa3-271edf600cac\") " pod="openstack/ssh-known-hosts-edpm-deployment-xdsm4" Dec 02 19:12:22 crc kubenswrapper[4792]: I1202 19:12:22.105002 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-xdsm4" Dec 02 19:12:22 crc kubenswrapper[4792]: I1202 19:12:22.732886 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-xdsm4"] Dec 02 19:12:23 crc kubenswrapper[4792]: I1202 19:12:23.688593 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-xdsm4" event={"ID":"e957a792-604a-439d-8fa3-271edf600cac","Type":"ContainerStarted","Data":"d4180e7e209140500c145b665e164afdd2252a6f1a10b321a95f464e56ce7515"} Dec 02 19:12:23 crc kubenswrapper[4792]: I1202 19:12:23.689281 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-xdsm4" event={"ID":"e957a792-604a-439d-8fa3-271edf600cac","Type":"ContainerStarted","Data":"bb46ca0f59622cb5d7908a9eea4f7b9d1395c904861893c93e65e7d0d8e3834a"} Dec 02 19:12:23 crc kubenswrapper[4792]: I1202 19:12:23.718042 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-xdsm4" podStartSLOduration=2.288442701 podStartE2EDuration="2.718021743s" podCreationTimestamp="2025-12-02 19:12:21 +0000 UTC" firstStartedPulling="2025-12-02 19:12:22.743790842 +0000 UTC m=+2173.516683180" lastFinishedPulling="2025-12-02 19:12:23.173369854 +0000 UTC m=+2173.946262222" observedRunningTime="2025-12-02 19:12:23.706731398 +0000 UTC m=+2174.479623736" watchObservedRunningTime="2025-12-02 19:12:23.718021743 +0000 UTC m=+2174.490914081" Dec 02 19:12:30 crc kubenswrapper[4792]: I1202 19:12:30.059861 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-db-sync-vj9gg"] Dec 02 19:12:30 crc kubenswrapper[4792]: I1202 19:12:30.073687 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-db-sync-vj9gg"] Dec 02 19:12:31 crc kubenswrapper[4792]: I1202 19:12:31.555147 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="809cb60d-1a06-4216-8f74-882aa0d7470e" path="/var/lib/kubelet/pods/809cb60d-1a06-4216-8f74-882aa0d7470e/volumes" Dec 02 19:12:31 crc kubenswrapper[4792]: I1202 19:12:31.793579 4792 generic.go:334] "Generic (PLEG): container finished" 
podID="e957a792-604a-439d-8fa3-271edf600cac" containerID="d4180e7e209140500c145b665e164afdd2252a6f1a10b321a95f464e56ce7515" exitCode=0 Dec 02 19:12:31 crc kubenswrapper[4792]: I1202 19:12:31.793642 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-xdsm4" event={"ID":"e957a792-604a-439d-8fa3-271edf600cac","Type":"ContainerDied","Data":"d4180e7e209140500c145b665e164afdd2252a6f1a10b321a95f464e56ce7515"} Dec 02 19:12:33 crc kubenswrapper[4792]: I1202 19:12:33.398484 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-xdsm4" Dec 02 19:12:33 crc kubenswrapper[4792]: I1202 19:12:33.484845 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/e957a792-604a-439d-8fa3-271edf600cac-ssh-key-openstack-edpm-ipam\") pod \"e957a792-604a-439d-8fa3-271edf600cac\" (UID: \"e957a792-604a-439d-8fa3-271edf600cac\") " Dec 02 19:12:33 crc kubenswrapper[4792]: I1202 19:12:33.484887 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/e957a792-604a-439d-8fa3-271edf600cac-inventory-0\") pod \"e957a792-604a-439d-8fa3-271edf600cac\" (UID: \"e957a792-604a-439d-8fa3-271edf600cac\") " Dec 02 19:12:33 crc kubenswrapper[4792]: I1202 19:12:33.485110 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-khggn\" (UniqueName: \"kubernetes.io/projected/e957a792-604a-439d-8fa3-271edf600cac-kube-api-access-khggn\") pod \"e957a792-604a-439d-8fa3-271edf600cac\" (UID: \"e957a792-604a-439d-8fa3-271edf600cac\") " Dec 02 19:12:33 crc kubenswrapper[4792]: I1202 19:12:33.521612 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e957a792-604a-439d-8fa3-271edf600cac-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "e957a792-604a-439d-8fa3-271edf600cac" (UID: "e957a792-604a-439d-8fa3-271edf600cac"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:12:33 crc kubenswrapper[4792]: I1202 19:12:33.523777 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e957a792-604a-439d-8fa3-271edf600cac-kube-api-access-khggn" (OuterVolumeSpecName: "kube-api-access-khggn") pod "e957a792-604a-439d-8fa3-271edf600cac" (UID: "e957a792-604a-439d-8fa3-271edf600cac"). InnerVolumeSpecName "kube-api-access-khggn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:12:33 crc kubenswrapper[4792]: I1202 19:12:33.549691 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e957a792-604a-439d-8fa3-271edf600cac-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "e957a792-604a-439d-8fa3-271edf600cac" (UID: "e957a792-604a-439d-8fa3-271edf600cac"). InnerVolumeSpecName "inventory-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:12:33 crc kubenswrapper[4792]: I1202 19:12:33.586993 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-khggn\" (UniqueName: \"kubernetes.io/projected/e957a792-604a-439d-8fa3-271edf600cac-kube-api-access-khggn\") on node \"crc\" DevicePath \"\"" Dec 02 19:12:33 crc kubenswrapper[4792]: I1202 19:12:33.587031 4792 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/e957a792-604a-439d-8fa3-271edf600cac-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 02 19:12:33 crc kubenswrapper[4792]: I1202 19:12:33.587041 4792 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/e957a792-604a-439d-8fa3-271edf600cac-inventory-0\") on node \"crc\" DevicePath \"\"" Dec 02 19:12:33 crc kubenswrapper[4792]: I1202 19:12:33.820924 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-xdsm4" event={"ID":"e957a792-604a-439d-8fa3-271edf600cac","Type":"ContainerDied","Data":"bb46ca0f59622cb5d7908a9eea4f7b9d1395c904861893c93e65e7d0d8e3834a"} Dec 02 19:12:33 crc kubenswrapper[4792]: I1202 19:12:33.820978 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bb46ca0f59622cb5d7908a9eea4f7b9d1395c904861893c93e65e7d0d8e3834a" Dec 02 19:12:33 crc kubenswrapper[4792]: I1202 19:12:33.821060 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-xdsm4" Dec 02 19:12:33 crc kubenswrapper[4792]: I1202 19:12:33.896576 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-bt7b8"] Dec 02 19:12:33 crc kubenswrapper[4792]: E1202 19:12:33.897038 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e957a792-604a-439d-8fa3-271edf600cac" containerName="ssh-known-hosts-edpm-deployment" Dec 02 19:12:33 crc kubenswrapper[4792]: I1202 19:12:33.897061 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="e957a792-604a-439d-8fa3-271edf600cac" containerName="ssh-known-hosts-edpm-deployment" Dec 02 19:12:33 crc kubenswrapper[4792]: I1202 19:12:33.897368 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="e957a792-604a-439d-8fa3-271edf600cac" containerName="ssh-known-hosts-edpm-deployment" Dec 02 19:12:33 crc kubenswrapper[4792]: I1202 19:12:33.898332 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bt7b8" Dec 02 19:12:33 crc kubenswrapper[4792]: I1202 19:12:33.904028 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 19:12:33 crc kubenswrapper[4792]: I1202 19:12:33.904742 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 19:12:33 crc kubenswrapper[4792]: I1202 19:12:33.905613 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 19:12:33 crc kubenswrapper[4792]: I1202 19:12:33.906902 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zln7c" Dec 02 19:12:33 crc kubenswrapper[4792]: I1202 19:12:33.911443 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-bt7b8"] Dec 02 19:12:33 crc kubenswrapper[4792]: I1202 19:12:33.994651 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6sn9n\" (UniqueName: \"kubernetes.io/projected/38b7b309-7222-4bf3-b8c1-33b0d01f7c29-kube-api-access-6sn9n\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-bt7b8\" (UID: \"38b7b309-7222-4bf3-b8c1-33b0d01f7c29\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bt7b8" Dec 02 19:12:33 crc kubenswrapper[4792]: I1202 19:12:33.994927 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/38b7b309-7222-4bf3-b8c1-33b0d01f7c29-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-bt7b8\" (UID: \"38b7b309-7222-4bf3-b8c1-33b0d01f7c29\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bt7b8" Dec 02 19:12:33 crc kubenswrapper[4792]: I1202 19:12:33.995035 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/38b7b309-7222-4bf3-b8c1-33b0d01f7c29-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-bt7b8\" (UID: \"38b7b309-7222-4bf3-b8c1-33b0d01f7c29\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bt7b8" Dec 02 19:12:34 crc kubenswrapper[4792]: I1202 19:12:34.097035 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/38b7b309-7222-4bf3-b8c1-33b0d01f7c29-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-bt7b8\" (UID: \"38b7b309-7222-4bf3-b8c1-33b0d01f7c29\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bt7b8" Dec 02 19:12:34 crc kubenswrapper[4792]: I1202 19:12:34.097098 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/38b7b309-7222-4bf3-b8c1-33b0d01f7c29-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-bt7b8\" (UID: \"38b7b309-7222-4bf3-b8c1-33b0d01f7c29\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bt7b8" Dec 02 19:12:34 crc kubenswrapper[4792]: I1202 19:12:34.097150 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6sn9n\" (UniqueName: \"kubernetes.io/projected/38b7b309-7222-4bf3-b8c1-33b0d01f7c29-kube-api-access-6sn9n\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-bt7b8\" (UID: \"38b7b309-7222-4bf3-b8c1-33b0d01f7c29\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bt7b8" Dec 02 19:12:34 crc kubenswrapper[4792]: I1202 19:12:34.100857 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/38b7b309-7222-4bf3-b8c1-33b0d01f7c29-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-bt7b8\" (UID: \"38b7b309-7222-4bf3-b8c1-33b0d01f7c29\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bt7b8" Dec 02 19:12:34 crc kubenswrapper[4792]: I1202 19:12:34.100950 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/38b7b309-7222-4bf3-b8c1-33b0d01f7c29-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-bt7b8\" (UID: \"38b7b309-7222-4bf3-b8c1-33b0d01f7c29\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bt7b8" Dec 02 19:12:34 crc kubenswrapper[4792]: I1202 19:12:34.118747 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6sn9n\" (UniqueName: \"kubernetes.io/projected/38b7b309-7222-4bf3-b8c1-33b0d01f7c29-kube-api-access-6sn9n\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-bt7b8\" (UID: \"38b7b309-7222-4bf3-b8c1-33b0d01f7c29\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bt7b8" Dec 02 19:12:34 crc kubenswrapper[4792]: I1202 19:12:34.264344 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bt7b8" Dec 02 19:12:34 crc kubenswrapper[4792]: I1202 19:12:34.870384 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-bt7b8"] Dec 02 19:12:35 crc kubenswrapper[4792]: I1202 19:12:35.035431 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-storageinit-c5g69"] Dec 02 19:12:35 crc kubenswrapper[4792]: I1202 19:12:35.045842 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-storageinit-c5g69"] Dec 02 19:12:35 crc kubenswrapper[4792]: I1202 19:12:35.560661 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb580c6b-0812-49a4-8c58-07c8162c2fe3" path="/var/lib/kubelet/pods/eb580c6b-0812-49a4-8c58-07c8162c2fe3/volumes" Dec 02 19:12:35 crc kubenswrapper[4792]: I1202 19:12:35.866260 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bt7b8" event={"ID":"38b7b309-7222-4bf3-b8c1-33b0d01f7c29","Type":"ContainerStarted","Data":"692d1679e34d0ae8d9b6dd946a58a3907fa5f6053a5f11ed5b37a768b0f42842"} Dec 02 19:12:35 crc kubenswrapper[4792]: I1202 19:12:35.866413 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bt7b8" event={"ID":"38b7b309-7222-4bf3-b8c1-33b0d01f7c29","Type":"ContainerStarted","Data":"5da0bc360782372c9201339a86bcc5a307b9e66c6fa033689eb755fd7d36148c"} Dec 02 19:12:35 crc kubenswrapper[4792]: I1202 19:12:35.898684 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bt7b8" podStartSLOduration=2.277704899 podStartE2EDuration="2.89865375s" podCreationTimestamp="2025-12-02 19:12:33 +0000 UTC" firstStartedPulling="2025-12-02 19:12:34.864038463 +0000 UTC m=+2185.636930801" lastFinishedPulling="2025-12-02 19:12:35.484987284 +0000 UTC m=+2186.257879652" observedRunningTime="2025-12-02 19:12:35.884761958 +0000 UTC m=+2186.657654326" watchObservedRunningTime="2025-12-02 
19:12:35.89865375 +0000 UTC m=+2186.671546108" Dec 02 19:12:37 crc kubenswrapper[4792]: I1202 19:12:37.569440 4792 scope.go:117] "RemoveContainer" containerID="9efa3d85c4e1e62d471e28a54d1341303660ad36d1805984d4d3cf4df36773ac" Dec 02 19:12:37 crc kubenswrapper[4792]: I1202 19:12:37.622022 4792 scope.go:117] "RemoveContainer" containerID="f58d2390289bd232c0a6a7727eb8a697e0c2173db747207551bb11038525df1a" Dec 02 19:12:38 crc kubenswrapper[4792]: I1202 19:12:38.081690 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 19:12:38 crc kubenswrapper[4792]: I1202 19:12:38.081775 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 19:12:45 crc kubenswrapper[4792]: I1202 19:12:45.985930 4792 generic.go:334] "Generic (PLEG): container finished" podID="38b7b309-7222-4bf3-b8c1-33b0d01f7c29" containerID="692d1679e34d0ae8d9b6dd946a58a3907fa5f6053a5f11ed5b37a768b0f42842" exitCode=0 Dec 02 19:12:45 crc kubenswrapper[4792]: I1202 19:12:45.986055 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bt7b8" event={"ID":"38b7b309-7222-4bf3-b8c1-33b0d01f7c29","Type":"ContainerDied","Data":"692d1679e34d0ae8d9b6dd946a58a3907fa5f6053a5f11ed5b37a768b0f42842"} Dec 02 19:12:47 crc kubenswrapper[4792]: I1202 19:12:47.575769 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bt7b8" Dec 02 19:12:47 crc kubenswrapper[4792]: I1202 19:12:47.718758 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6sn9n\" (UniqueName: \"kubernetes.io/projected/38b7b309-7222-4bf3-b8c1-33b0d01f7c29-kube-api-access-6sn9n\") pod \"38b7b309-7222-4bf3-b8c1-33b0d01f7c29\" (UID: \"38b7b309-7222-4bf3-b8c1-33b0d01f7c29\") " Dec 02 19:12:47 crc kubenswrapper[4792]: I1202 19:12:47.718908 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/38b7b309-7222-4bf3-b8c1-33b0d01f7c29-inventory\") pod \"38b7b309-7222-4bf3-b8c1-33b0d01f7c29\" (UID: \"38b7b309-7222-4bf3-b8c1-33b0d01f7c29\") " Dec 02 19:12:47 crc kubenswrapper[4792]: I1202 19:12:47.718947 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/38b7b309-7222-4bf3-b8c1-33b0d01f7c29-ssh-key\") pod \"38b7b309-7222-4bf3-b8c1-33b0d01f7c29\" (UID: \"38b7b309-7222-4bf3-b8c1-33b0d01f7c29\") " Dec 02 19:12:47 crc kubenswrapper[4792]: I1202 19:12:47.733578 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/38b7b309-7222-4bf3-b8c1-33b0d01f7c29-kube-api-access-6sn9n" (OuterVolumeSpecName: "kube-api-access-6sn9n") pod "38b7b309-7222-4bf3-b8c1-33b0d01f7c29" (UID: "38b7b309-7222-4bf3-b8c1-33b0d01f7c29"). InnerVolumeSpecName "kube-api-access-6sn9n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:12:47 crc kubenswrapper[4792]: I1202 19:12:47.756345 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38b7b309-7222-4bf3-b8c1-33b0d01f7c29-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "38b7b309-7222-4bf3-b8c1-33b0d01f7c29" (UID: "38b7b309-7222-4bf3-b8c1-33b0d01f7c29"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:12:47 crc kubenswrapper[4792]: I1202 19:12:47.767513 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38b7b309-7222-4bf3-b8c1-33b0d01f7c29-inventory" (OuterVolumeSpecName: "inventory") pod "38b7b309-7222-4bf3-b8c1-33b0d01f7c29" (UID: "38b7b309-7222-4bf3-b8c1-33b0d01f7c29"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:12:47 crc kubenswrapper[4792]: I1202 19:12:47.821510 4792 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/38b7b309-7222-4bf3-b8c1-33b0d01f7c29-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 19:12:47 crc kubenswrapper[4792]: I1202 19:12:47.821593 4792 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/38b7b309-7222-4bf3-b8c1-33b0d01f7c29-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 19:12:47 crc kubenswrapper[4792]: I1202 19:12:47.821609 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6sn9n\" (UniqueName: \"kubernetes.io/projected/38b7b309-7222-4bf3-b8c1-33b0d01f7c29-kube-api-access-6sn9n\") on node \"crc\" DevicePath \"\"" Dec 02 19:12:48 crc kubenswrapper[4792]: I1202 19:12:48.010775 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bt7b8" event={"ID":"38b7b309-7222-4bf3-b8c1-33b0d01f7c29","Type":"ContainerDied","Data":"5da0bc360782372c9201339a86bcc5a307b9e66c6fa033689eb755fd7d36148c"} Dec 02 19:12:48 crc kubenswrapper[4792]: I1202 19:12:48.010835 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5da0bc360782372c9201339a86bcc5a307b9e66c6fa033689eb755fd7d36148c" Dec 02 19:12:48 crc kubenswrapper[4792]: I1202 19:12:48.010851 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bt7b8" Dec 02 19:12:48 crc kubenswrapper[4792]: I1202 19:12:48.123856 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-pkxk9"] Dec 02 19:12:48 crc kubenswrapper[4792]: E1202 19:12:48.124657 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38b7b309-7222-4bf3-b8c1-33b0d01f7c29" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 02 19:12:48 crc kubenswrapper[4792]: I1202 19:12:48.124688 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="38b7b309-7222-4bf3-b8c1-33b0d01f7c29" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 02 19:12:48 crc kubenswrapper[4792]: I1202 19:12:48.125092 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="38b7b309-7222-4bf3-b8c1-33b0d01f7c29" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 02 19:12:48 crc kubenswrapper[4792]: I1202 19:12:48.126486 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-pkxk9" Dec 02 19:12:48 crc kubenswrapper[4792]: I1202 19:12:48.135577 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-pkxk9"] Dec 02 19:12:48 crc kubenswrapper[4792]: I1202 19:12:48.174884 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 19:12:48 crc kubenswrapper[4792]: I1202 19:12:48.175187 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zln7c" Dec 02 19:12:48 crc kubenswrapper[4792]: I1202 19:12:48.175290 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 19:12:48 crc kubenswrapper[4792]: I1202 19:12:48.175357 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 19:12:48 crc kubenswrapper[4792]: I1202 19:12:48.229793 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5f606874-5cd7-4b0c-b092-c2a2d2e94728-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-pkxk9\" (UID: \"5f606874-5cd7-4b0c-b092-c2a2d2e94728\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-pkxk9" Dec 02 19:12:48 crc kubenswrapper[4792]: I1202 19:12:48.229857 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f606874-5cd7-4b0c-b092-c2a2d2e94728-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-pkxk9\" (UID: \"5f606874-5cd7-4b0c-b092-c2a2d2e94728\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-pkxk9" Dec 02 19:12:48 crc kubenswrapper[4792]: I1202 19:12:48.229895 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8n9cz\" (UniqueName: \"kubernetes.io/projected/5f606874-5cd7-4b0c-b092-c2a2d2e94728-kube-api-access-8n9cz\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-pkxk9\" (UID: \"5f606874-5cd7-4b0c-b092-c2a2d2e94728\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-pkxk9" Dec 02 19:12:48 crc kubenswrapper[4792]: I1202 19:12:48.331771 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5f606874-5cd7-4b0c-b092-c2a2d2e94728-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-pkxk9\" (UID: \"5f606874-5cd7-4b0c-b092-c2a2d2e94728\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-pkxk9" Dec 02 19:12:48 crc kubenswrapper[4792]: I1202 19:12:48.331825 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f606874-5cd7-4b0c-b092-c2a2d2e94728-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-pkxk9\" (UID: \"5f606874-5cd7-4b0c-b092-c2a2d2e94728\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-pkxk9" Dec 02 19:12:48 crc kubenswrapper[4792]: I1202 19:12:48.331871 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8n9cz\" (UniqueName: \"kubernetes.io/projected/5f606874-5cd7-4b0c-b092-c2a2d2e94728-kube-api-access-8n9cz\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-pkxk9\" (UID: 
\"5f606874-5cd7-4b0c-b092-c2a2d2e94728\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-pkxk9" Dec 02 19:12:48 crc kubenswrapper[4792]: I1202 19:12:48.336225 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f606874-5cd7-4b0c-b092-c2a2d2e94728-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-pkxk9\" (UID: \"5f606874-5cd7-4b0c-b092-c2a2d2e94728\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-pkxk9" Dec 02 19:12:48 crc kubenswrapper[4792]: I1202 19:12:48.336298 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5f606874-5cd7-4b0c-b092-c2a2d2e94728-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-pkxk9\" (UID: \"5f606874-5cd7-4b0c-b092-c2a2d2e94728\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-pkxk9" Dec 02 19:12:48 crc kubenswrapper[4792]: I1202 19:12:48.358616 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8n9cz\" (UniqueName: \"kubernetes.io/projected/5f606874-5cd7-4b0c-b092-c2a2d2e94728-kube-api-access-8n9cz\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-pkxk9\" (UID: \"5f606874-5cd7-4b0c-b092-c2a2d2e94728\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-pkxk9" Dec 02 19:12:48 crc kubenswrapper[4792]: I1202 19:12:48.490494 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-pkxk9" Dec 02 19:12:49 crc kubenswrapper[4792]: I1202 19:12:49.181344 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-pkxk9"] Dec 02 19:12:50 crc kubenswrapper[4792]: I1202 19:12:50.048290 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-pkxk9" event={"ID":"5f606874-5cd7-4b0c-b092-c2a2d2e94728","Type":"ContainerStarted","Data":"c50efbca89eaa6733c1fe461251935332d6c6aa12c77e06778422ff0e2ad9b34"} Dec 02 19:12:50 crc kubenswrapper[4792]: I1202 19:12:50.048944 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-pkxk9" event={"ID":"5f606874-5cd7-4b0c-b092-c2a2d2e94728","Type":"ContainerStarted","Data":"304b22b062be63213ed7c073613e270f4bf5e031c88061862f4c714b1c38476a"} Dec 02 19:12:50 crc kubenswrapper[4792]: I1202 19:12:50.068924 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-pkxk9" podStartSLOduration=1.57790216 podStartE2EDuration="2.068905536s" podCreationTimestamp="2025-12-02 19:12:48 +0000 UTC" firstStartedPulling="2025-12-02 19:12:49.196075015 +0000 UTC m=+2199.968967343" lastFinishedPulling="2025-12-02 19:12:49.687078351 +0000 UTC m=+2200.459970719" observedRunningTime="2025-12-02 19:12:50.061936874 +0000 UTC m=+2200.834829202" watchObservedRunningTime="2025-12-02 19:12:50.068905536 +0000 UTC m=+2200.841797864" Dec 02 19:13:01 crc kubenswrapper[4792]: I1202 19:13:01.217298 4792 generic.go:334] "Generic (PLEG): container finished" podID="5f606874-5cd7-4b0c-b092-c2a2d2e94728" containerID="c50efbca89eaa6733c1fe461251935332d6c6aa12c77e06778422ff0e2ad9b34" exitCode=0 Dec 02 19:13:01 crc kubenswrapper[4792]: I1202 19:13:01.217804 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-pkxk9" 
event={"ID":"5f606874-5cd7-4b0c-b092-c2a2d2e94728","Type":"ContainerDied","Data":"c50efbca89eaa6733c1fe461251935332d6c6aa12c77e06778422ff0e2ad9b34"} Dec 02 19:13:02 crc kubenswrapper[4792]: I1202 19:13:02.887852 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-pkxk9" Dec 02 19:13:02 crc kubenswrapper[4792]: I1202 19:13:02.994725 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f606874-5cd7-4b0c-b092-c2a2d2e94728-inventory\") pod \"5f606874-5cd7-4b0c-b092-c2a2d2e94728\" (UID: \"5f606874-5cd7-4b0c-b092-c2a2d2e94728\") " Dec 02 19:13:02 crc kubenswrapper[4792]: I1202 19:13:02.994940 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8n9cz\" (UniqueName: \"kubernetes.io/projected/5f606874-5cd7-4b0c-b092-c2a2d2e94728-kube-api-access-8n9cz\") pod \"5f606874-5cd7-4b0c-b092-c2a2d2e94728\" (UID: \"5f606874-5cd7-4b0c-b092-c2a2d2e94728\") " Dec 02 19:13:02 crc kubenswrapper[4792]: I1202 19:13:02.994969 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5f606874-5cd7-4b0c-b092-c2a2d2e94728-ssh-key\") pod \"5f606874-5cd7-4b0c-b092-c2a2d2e94728\" (UID: \"5f606874-5cd7-4b0c-b092-c2a2d2e94728\") " Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.000990 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f606874-5cd7-4b0c-b092-c2a2d2e94728-kube-api-access-8n9cz" (OuterVolumeSpecName: "kube-api-access-8n9cz") pod "5f606874-5cd7-4b0c-b092-c2a2d2e94728" (UID: "5f606874-5cd7-4b0c-b092-c2a2d2e94728"). InnerVolumeSpecName "kube-api-access-8n9cz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.024701 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f606874-5cd7-4b0c-b092-c2a2d2e94728-inventory" (OuterVolumeSpecName: "inventory") pod "5f606874-5cd7-4b0c-b092-c2a2d2e94728" (UID: "5f606874-5cd7-4b0c-b092-c2a2d2e94728"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.024884 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f606874-5cd7-4b0c-b092-c2a2d2e94728-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5f606874-5cd7-4b0c-b092-c2a2d2e94728" (UID: "5f606874-5cd7-4b0c-b092-c2a2d2e94728"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.097790 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8n9cz\" (UniqueName: \"kubernetes.io/projected/5f606874-5cd7-4b0c-b092-c2a2d2e94728-kube-api-access-8n9cz\") on node \"crc\" DevicePath \"\"" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.097839 4792 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5f606874-5cd7-4b0c-b092-c2a2d2e94728-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.097862 4792 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f606874-5cd7-4b0c-b092-c2a2d2e94728-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.244999 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-pkxk9" event={"ID":"5f606874-5cd7-4b0c-b092-c2a2d2e94728","Type":"ContainerDied","Data":"304b22b062be63213ed7c073613e270f4bf5e031c88061862f4c714b1c38476a"} Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.245051 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="304b22b062be63213ed7c073613e270f4bf5e031c88061862f4c714b1c38476a" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.245116 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-pkxk9" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.346033 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx"] Dec 02 19:13:03 crc kubenswrapper[4792]: E1202 19:13:03.346513 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f606874-5cd7-4b0c-b092-c2a2d2e94728" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.346618 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f606874-5cd7-4b0c-b092-c2a2d2e94728" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.346906 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f606874-5cd7-4b0c-b092-c2a2d2e94728" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.348220 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.353460 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.354721 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.354893 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.354919 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zln7c" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.355020 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.355087 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.355097 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.355124 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.396447 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx"] Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.510236 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/0d0d81cf-d181-4589-ab97-56eb22868c2f-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.510350 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/0d0d81cf-d181-4589-ab97-56eb22868c2f-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.510433 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.510492 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-bootstrap-combined-ca-bundle\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.510584 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.510696 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tvclf\" (UniqueName: \"kubernetes.io/projected/0d0d81cf-d181-4589-ab97-56eb22868c2f-kube-api-access-tvclf\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.510748 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/0d0d81cf-d181-4589-ab97-56eb22868c2f-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.510817 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.510966 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.511069 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.511112 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/0d0d81cf-d181-4589-ab97-56eb22868c2f-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx\" 
(UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.511141 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.511241 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.511373 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.613310 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/0d0d81cf-d181-4589-ab97-56eb22868c2f-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.613383 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/0d0d81cf-d181-4589-ab97-56eb22868c2f-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.614262 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.614294 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.614344 4792 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.614410 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tvclf\" (UniqueName: \"kubernetes.io/projected/0d0d81cf-d181-4589-ab97-56eb22868c2f-kube-api-access-tvclf\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.614453 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/0d0d81cf-d181-4589-ab97-56eb22868c2f-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.614487 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.614601 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.614646 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.614674 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/0d0d81cf-d181-4589-ab97-56eb22868c2f-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.614703 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx\" (UID: 
\"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.614778 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.614846 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.619442 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.621764 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/0d0d81cf-d181-4589-ab97-56eb22868c2f-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.621869 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.622572 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/0d0d81cf-d181-4589-ab97-56eb22868c2f-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.622956 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/0d0d81cf-d181-4589-ab97-56eb22868c2f-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.623050 4792 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.623436 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.623745 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.625179 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.625754 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.626328 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.627255 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/0d0d81cf-d181-4589-ab97-56eb22868c2f-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.632786 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:03 crc 
kubenswrapper[4792]: I1202 19:13:03.648780 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tvclf\" (UniqueName: \"kubernetes.io/projected/0d0d81cf-d181-4589-ab97-56eb22868c2f-kube-api-access-tvclf\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:03 crc kubenswrapper[4792]: I1202 19:13:03.690641 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:04 crc kubenswrapper[4792]: I1202 19:13:04.304065 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx"] Dec 02 19:13:05 crc kubenswrapper[4792]: I1202 19:13:05.266809 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" event={"ID":"0d0d81cf-d181-4589-ab97-56eb22868c2f","Type":"ContainerStarted","Data":"f4589c053bd77e6f99d2275e8ab4f67ffb95dcd8b88f757f808e8be24b2d8780"} Dec 02 19:13:05 crc kubenswrapper[4792]: I1202 19:13:05.267143 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" event={"ID":"0d0d81cf-d181-4589-ab97-56eb22868c2f","Type":"ContainerStarted","Data":"9860a8892e290841ae0b303c5e0d6506c82d074aef6e34436e550b802c4e76bc"} Dec 02 19:13:05 crc kubenswrapper[4792]: I1202 19:13:05.307077 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" podStartSLOduration=1.827697108 podStartE2EDuration="2.307053621s" podCreationTimestamp="2025-12-02 19:13:03 +0000 UTC" firstStartedPulling="2025-12-02 19:13:04.312149619 +0000 UTC m=+2215.085041947" lastFinishedPulling="2025-12-02 19:13:04.791506102 +0000 UTC m=+2215.564398460" observedRunningTime="2025-12-02 19:13:05.289358438 +0000 UTC m=+2216.062250776" watchObservedRunningTime="2025-12-02 19:13:05.307053621 +0000 UTC m=+2216.079945979" Dec 02 19:13:08 crc kubenswrapper[4792]: I1202 19:13:08.081076 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 19:13:08 crc kubenswrapper[4792]: I1202 19:13:08.081447 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 19:13:38 crc kubenswrapper[4792]: I1202 19:13:38.081401 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 19:13:38 crc kubenswrapper[4792]: I1202 19:13:38.082188 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" 
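The startup-latency record above is internally consistent: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration additionally subtracts the image-pull window. A small self-contained Go check using the logged timestamps:

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        created, _ := time.Parse(time.RFC3339Nano, "2025-12-02T19:13:03Z")
        running, _ := time.Parse(time.RFC3339Nano, "2025-12-02T19:13:05.307053621Z")
        pullStart, _ := time.Parse(time.RFC3339Nano, "2025-12-02T19:13:04.312149619Z")
        pullEnd, _ := time.Parse(time.RFC3339Nano, "2025-12-02T19:13:04.791506102Z")

        e2e := running.Sub(created)        // podStartE2EDuration
        slo := e2e - pullEnd.Sub(pullStart) // podStartSLOduration
        fmt.Println(e2e, slo)               // 2.307053621s 1.827697108s
    }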
probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 19:13:38 crc kubenswrapper[4792]: I1202 19:13:38.082247 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" Dec 02 19:13:38 crc kubenswrapper[4792]: I1202 19:13:38.083438 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3c67a741cfc6dd9fe0d54fd77123388dbf880be9adb881936a491df61309de1e"} pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 19:13:38 crc kubenswrapper[4792]: I1202 19:13:38.083544 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" containerID="cri-o://3c67a741cfc6dd9fe0d54fd77123388dbf880be9adb881936a491df61309de1e" gracePeriod=600 Dec 02 19:13:38 crc kubenswrapper[4792]: E1202 19:13:38.215315 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:13:38 crc kubenswrapper[4792]: I1202 19:13:38.639464 4792 generic.go:334] "Generic (PLEG): container finished" podID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerID="3c67a741cfc6dd9fe0d54fd77123388dbf880be9adb881936a491df61309de1e" exitCode=0 Dec 02 19:13:38 crc kubenswrapper[4792]: I1202 19:13:38.639532 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" event={"ID":"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f","Type":"ContainerDied","Data":"3c67a741cfc6dd9fe0d54fd77123388dbf880be9adb881936a491df61309de1e"} Dec 02 19:13:38 crc kubenswrapper[4792]: I1202 19:13:38.639923 4792 scope.go:117] "RemoveContainer" containerID="11788464675fa1d7887d1f27536456407985fbf82e43eb7551c12897d3a408e0" Dec 02 19:13:38 crc kubenswrapper[4792]: I1202 19:13:38.641437 4792 scope.go:117] "RemoveContainer" containerID="3c67a741cfc6dd9fe0d54fd77123388dbf880be9adb881936a491df61309de1e" Dec 02 19:13:38 crc kubenswrapper[4792]: E1202 19:13:38.643951 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:13:40 crc kubenswrapper[4792]: I1202 19:13:40.845774 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-zfw5p"] Dec 02 19:13:40 crc kubenswrapper[4792]: I1202 19:13:40.850846 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zfw5p" Dec 02 19:13:40 crc kubenswrapper[4792]: I1202 19:13:40.861649 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zfw5p"] Dec 02 19:13:40 crc kubenswrapper[4792]: I1202 19:13:40.932777 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxmtd\" (UniqueName: \"kubernetes.io/projected/4a1d762b-215f-4c70-8e5a-51d43604bf2f-kube-api-access-hxmtd\") pod \"certified-operators-zfw5p\" (UID: \"4a1d762b-215f-4c70-8e5a-51d43604bf2f\") " pod="openshift-marketplace/certified-operators-zfw5p" Dec 02 19:13:40 crc kubenswrapper[4792]: I1202 19:13:40.932962 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a1d762b-215f-4c70-8e5a-51d43604bf2f-catalog-content\") pod \"certified-operators-zfw5p\" (UID: \"4a1d762b-215f-4c70-8e5a-51d43604bf2f\") " pod="openshift-marketplace/certified-operators-zfw5p" Dec 02 19:13:40 crc kubenswrapper[4792]: I1202 19:13:40.933288 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a1d762b-215f-4c70-8e5a-51d43604bf2f-utilities\") pod \"certified-operators-zfw5p\" (UID: \"4a1d762b-215f-4c70-8e5a-51d43604bf2f\") " pod="openshift-marketplace/certified-operators-zfw5p" Dec 02 19:13:41 crc kubenswrapper[4792]: I1202 19:13:41.035333 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a1d762b-215f-4c70-8e5a-51d43604bf2f-utilities\") pod \"certified-operators-zfw5p\" (UID: \"4a1d762b-215f-4c70-8e5a-51d43604bf2f\") " pod="openshift-marketplace/certified-operators-zfw5p" Dec 02 19:13:41 crc kubenswrapper[4792]: I1202 19:13:41.035443 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxmtd\" (UniqueName: \"kubernetes.io/projected/4a1d762b-215f-4c70-8e5a-51d43604bf2f-kube-api-access-hxmtd\") pod \"certified-operators-zfw5p\" (UID: \"4a1d762b-215f-4c70-8e5a-51d43604bf2f\") " pod="openshift-marketplace/certified-operators-zfw5p" Dec 02 19:13:41 crc kubenswrapper[4792]: I1202 19:13:41.035497 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a1d762b-215f-4c70-8e5a-51d43604bf2f-catalog-content\") pod \"certified-operators-zfw5p\" (UID: \"4a1d762b-215f-4c70-8e5a-51d43604bf2f\") " pod="openshift-marketplace/certified-operators-zfw5p" Dec 02 19:13:41 crc kubenswrapper[4792]: I1202 19:13:41.035999 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a1d762b-215f-4c70-8e5a-51d43604bf2f-utilities\") pod \"certified-operators-zfw5p\" (UID: \"4a1d762b-215f-4c70-8e5a-51d43604bf2f\") " pod="openshift-marketplace/certified-operators-zfw5p" Dec 02 19:13:41 crc kubenswrapper[4792]: I1202 19:13:41.036981 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a1d762b-215f-4c70-8e5a-51d43604bf2f-catalog-content\") pod \"certified-operators-zfw5p\" (UID: \"4a1d762b-215f-4c70-8e5a-51d43604bf2f\") " pod="openshift-marketplace/certified-operators-zfw5p" Dec 02 19:13:41 crc kubenswrapper[4792]: I1202 19:13:41.071945 4792 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-hxmtd\" (UniqueName: \"kubernetes.io/projected/4a1d762b-215f-4c70-8e5a-51d43604bf2f-kube-api-access-hxmtd\") pod \"certified-operators-zfw5p\" (UID: \"4a1d762b-215f-4c70-8e5a-51d43604bf2f\") " pod="openshift-marketplace/certified-operators-zfw5p" Dec 02 19:13:41 crc kubenswrapper[4792]: I1202 19:13:41.199074 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zfw5p" Dec 02 19:13:41 crc kubenswrapper[4792]: I1202 19:13:41.733640 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zfw5p"] Dec 02 19:13:42 crc kubenswrapper[4792]: I1202 19:13:42.682656 4792 generic.go:334] "Generic (PLEG): container finished" podID="4a1d762b-215f-4c70-8e5a-51d43604bf2f" containerID="f78fc88333016f5c237939e71143623303afef980cd091f8d3a6bf62d40c9492" exitCode=0 Dec 02 19:13:42 crc kubenswrapper[4792]: I1202 19:13:42.682754 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zfw5p" event={"ID":"4a1d762b-215f-4c70-8e5a-51d43604bf2f","Type":"ContainerDied","Data":"f78fc88333016f5c237939e71143623303afef980cd091f8d3a6bf62d40c9492"} Dec 02 19:13:42 crc kubenswrapper[4792]: I1202 19:13:42.682985 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zfw5p" event={"ID":"4a1d762b-215f-4c70-8e5a-51d43604bf2f","Type":"ContainerStarted","Data":"f28f5e1b18d778aec39ac103e22c9f06a6636a4fa2bc843ce62fac454a1752f3"} Dec 02 19:13:43 crc kubenswrapper[4792]: I1202 19:13:43.703133 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zfw5p" event={"ID":"4a1d762b-215f-4c70-8e5a-51d43604bf2f","Type":"ContainerStarted","Data":"5acca9c4eb820355bd51f3cf0c18f833d1e33f62b3fef8bde83509318ef5c071"} Dec 02 19:13:45 crc kubenswrapper[4792]: I1202 19:13:45.727185 4792 generic.go:334] "Generic (PLEG): container finished" podID="4a1d762b-215f-4c70-8e5a-51d43604bf2f" containerID="5acca9c4eb820355bd51f3cf0c18f833d1e33f62b3fef8bde83509318ef5c071" exitCode=0 Dec 02 19:13:45 crc kubenswrapper[4792]: I1202 19:13:45.727258 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zfw5p" event={"ID":"4a1d762b-215f-4c70-8e5a-51d43604bf2f","Type":"ContainerDied","Data":"5acca9c4eb820355bd51f3cf0c18f833d1e33f62b3fef8bde83509318ef5c071"} Dec 02 19:13:46 crc kubenswrapper[4792]: I1202 19:13:46.739814 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zfw5p" event={"ID":"4a1d762b-215f-4c70-8e5a-51d43604bf2f","Type":"ContainerStarted","Data":"bfce06ff04adadc2db3b3281c1ef3a305e3588380b8ed8cea6d6eaef6a808ad2"} Dec 02 19:13:46 crc kubenswrapper[4792]: I1202 19:13:46.763318 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-zfw5p" podStartSLOduration=3.311011772 podStartE2EDuration="6.763302311s" podCreationTimestamp="2025-12-02 19:13:40 +0000 UTC" firstStartedPulling="2025-12-02 19:13:42.686012256 +0000 UTC m=+2253.458904624" lastFinishedPulling="2025-12-02 19:13:46.138302835 +0000 UTC m=+2256.911195163" observedRunningTime="2025-12-02 19:13:46.758701381 +0000 UTC m=+2257.531593709" watchObservedRunningTime="2025-12-02 19:13:46.763302311 +0000 UTC m=+2257.536194639" Dec 02 19:13:49 crc kubenswrapper[4792]: I1202 19:13:49.774574 4792 generic.go:334] "Generic (PLEG): container finished" 
podID="0d0d81cf-d181-4589-ab97-56eb22868c2f" containerID="f4589c053bd77e6f99d2275e8ab4f67ffb95dcd8b88f757f808e8be24b2d8780" exitCode=0 Dec 02 19:13:49 crc kubenswrapper[4792]: I1202 19:13:49.774684 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" event={"ID":"0d0d81cf-d181-4589-ab97-56eb22868c2f","Type":"ContainerDied","Data":"f4589c053bd77e6f99d2275e8ab4f67ffb95dcd8b88f757f808e8be24b2d8780"} Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.200231 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-zfw5p" Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.202691 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-zfw5p" Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.255948 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-zfw5p" Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.311051 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.394055 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/0d0d81cf-d181-4589-ab97-56eb22868c2f-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"0d0d81cf-d181-4589-ab97-56eb22868c2f\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.394146 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-inventory\") pod \"0d0d81cf-d181-4589-ab97-56eb22868c2f\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.394227 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-repo-setup-combined-ca-bundle\") pod \"0d0d81cf-d181-4589-ab97-56eb22868c2f\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.394260 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-neutron-metadata-combined-ca-bundle\") pod \"0d0d81cf-d181-4589-ab97-56eb22868c2f\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.394336 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/0d0d81cf-d181-4589-ab97-56eb22868c2f-openstack-edpm-ipam-ovn-default-certs-0\") pod \"0d0d81cf-d181-4589-ab97-56eb22868c2f\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.394397 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-telemetry-combined-ca-bundle\") pod \"0d0d81cf-d181-4589-ab97-56eb22868c2f\" (UID: 
\"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.394422 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/0d0d81cf-d181-4589-ab97-56eb22868c2f-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"0d0d81cf-d181-4589-ab97-56eb22868c2f\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.394485 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-libvirt-combined-ca-bundle\") pod \"0d0d81cf-d181-4589-ab97-56eb22868c2f\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.394565 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tvclf\" (UniqueName: \"kubernetes.io/projected/0d0d81cf-d181-4589-ab97-56eb22868c2f-kube-api-access-tvclf\") pod \"0d0d81cf-d181-4589-ab97-56eb22868c2f\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.395445 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-bootstrap-combined-ca-bundle\") pod \"0d0d81cf-d181-4589-ab97-56eb22868c2f\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.395507 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-ovn-combined-ca-bundle\") pod \"0d0d81cf-d181-4589-ab97-56eb22868c2f\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.395558 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-ssh-key\") pod \"0d0d81cf-d181-4589-ab97-56eb22868c2f\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.395584 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/0d0d81cf-d181-4589-ab97-56eb22868c2f-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"0d0d81cf-d181-4589-ab97-56eb22868c2f\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.395942 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-nova-combined-ca-bundle\") pod \"0d0d81cf-d181-4589-ab97-56eb22868c2f\" (UID: \"0d0d81cf-d181-4589-ab97-56eb22868c2f\") " Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.400404 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d0d81cf-d181-4589-ab97-56eb22868c2f-kube-api-access-tvclf" (OuterVolumeSpecName: "kube-api-access-tvclf") pod "0d0d81cf-d181-4589-ab97-56eb22868c2f" (UID: "0d0d81cf-d181-4589-ab97-56eb22868c2f"). InnerVolumeSpecName "kube-api-access-tvclf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.401243 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "0d0d81cf-d181-4589-ab97-56eb22868c2f" (UID: "0d0d81cf-d181-4589-ab97-56eb22868c2f"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.401783 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "0d0d81cf-d181-4589-ab97-56eb22868c2f" (UID: "0d0d81cf-d181-4589-ab97-56eb22868c2f"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.404721 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d0d81cf-d181-4589-ab97-56eb22868c2f-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "0d0d81cf-d181-4589-ab97-56eb22868c2f" (UID: "0d0d81cf-d181-4589-ab97-56eb22868c2f"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.405475 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "0d0d81cf-d181-4589-ab97-56eb22868c2f" (UID: "0d0d81cf-d181-4589-ab97-56eb22868c2f"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.405613 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "0d0d81cf-d181-4589-ab97-56eb22868c2f" (UID: "0d0d81cf-d181-4589-ab97-56eb22868c2f"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.405741 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d0d81cf-d181-4589-ab97-56eb22868c2f-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "0d0d81cf-d181-4589-ab97-56eb22868c2f" (UID: "0d0d81cf-d181-4589-ab97-56eb22868c2f"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.405758 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "0d0d81cf-d181-4589-ab97-56eb22868c2f" (UID: "0d0d81cf-d181-4589-ab97-56eb22868c2f"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.405796 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d0d81cf-d181-4589-ab97-56eb22868c2f-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "0d0d81cf-d181-4589-ab97-56eb22868c2f" (UID: "0d0d81cf-d181-4589-ab97-56eb22868c2f"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.407484 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d0d81cf-d181-4589-ab97-56eb22868c2f-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "0d0d81cf-d181-4589-ab97-56eb22868c2f" (UID: "0d0d81cf-d181-4589-ab97-56eb22868c2f"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.407694 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "0d0d81cf-d181-4589-ab97-56eb22868c2f" (UID: "0d0d81cf-d181-4589-ab97-56eb22868c2f"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.407867 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "0d0d81cf-d181-4589-ab97-56eb22868c2f" (UID: "0d0d81cf-d181-4589-ab97-56eb22868c2f"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.428840 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "0d0d81cf-d181-4589-ab97-56eb22868c2f" (UID: "0d0d81cf-d181-4589-ab97-56eb22868c2f"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.452786 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-inventory" (OuterVolumeSpecName: "inventory") pod "0d0d81cf-d181-4589-ab97-56eb22868c2f" (UID: "0d0d81cf-d181-4589-ab97-56eb22868c2f"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.499382 4792 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/0d0d81cf-d181-4589-ab97-56eb22868c2f-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.499425 4792 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.499443 4792 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.499455 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tvclf\" (UniqueName: \"kubernetes.io/projected/0d0d81cf-d181-4589-ab97-56eb22868c2f-kube-api-access-tvclf\") on node \"crc\" DevicePath \"\"" Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.499469 4792 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.499481 4792 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.499493 4792 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.499506 4792 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/0d0d81cf-d181-4589-ab97-56eb22868c2f-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.499519 4792 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.499557 4792 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/0d0d81cf-d181-4589-ab97-56eb22868c2f-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.499570 4792 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.499582 4792 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-repo-setup-combined-ca-bundle\") on node 
\"crc\" DevicePath \"\"" Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.499595 4792 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d0d81cf-d181-4589-ab97-56eb22868c2f-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.499607 4792 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/0d0d81cf-d181-4589-ab97-56eb22868c2f-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.797850 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.798226 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx" event={"ID":"0d0d81cf-d181-4589-ab97-56eb22868c2f","Type":"ContainerDied","Data":"9860a8892e290841ae0b303c5e0d6506c82d074aef6e34436e550b802c4e76bc"} Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.798469 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9860a8892e290841ae0b303c5e0d6506c82d074aef6e34436e550b802c4e76bc" Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.884986 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-zfw5p" Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.925441 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-fdm4x"] Dec 02 19:13:51 crc kubenswrapper[4792]: E1202 19:13:51.925878 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d0d81cf-d181-4589-ab97-56eb22868c2f" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.925898 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d0d81cf-d181-4589-ab97-56eb22868c2f" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.926101 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d0d81cf-d181-4589-ab97-56eb22868c2f" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.926802 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fdm4x" Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.929798 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.930198 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zln7c" Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.930881 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.931922 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.932629 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 19:13:51 crc kubenswrapper[4792]: I1202 19:13:51.951016 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-fdm4x"] Dec 02 19:13:52 crc kubenswrapper[4792]: I1202 19:13:52.003059 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zfw5p"] Dec 02 19:13:52 crc kubenswrapper[4792]: I1202 19:13:52.009058 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5e968fa-782e-49cc-a729-ebf2f94b2bb3-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fdm4x\" (UID: \"c5e968fa-782e-49cc-a729-ebf2f94b2bb3\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fdm4x" Dec 02 19:13:52 crc kubenswrapper[4792]: I1202 19:13:52.009226 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/c5e968fa-782e-49cc-a729-ebf2f94b2bb3-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fdm4x\" (UID: \"c5e968fa-782e-49cc-a729-ebf2f94b2bb3\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fdm4x" Dec 02 19:13:52 crc kubenswrapper[4792]: I1202 19:13:52.009419 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2hw5x\" (UniqueName: \"kubernetes.io/projected/c5e968fa-782e-49cc-a729-ebf2f94b2bb3-kube-api-access-2hw5x\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fdm4x\" (UID: \"c5e968fa-782e-49cc-a729-ebf2f94b2bb3\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fdm4x" Dec 02 19:13:52 crc kubenswrapper[4792]: I1202 19:13:52.009549 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c5e968fa-782e-49cc-a729-ebf2f94b2bb3-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fdm4x\" (UID: \"c5e968fa-782e-49cc-a729-ebf2f94b2bb3\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fdm4x" Dec 02 19:13:52 crc kubenswrapper[4792]: I1202 19:13:52.009714 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c5e968fa-782e-49cc-a729-ebf2f94b2bb3-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fdm4x\" (UID: \"c5e968fa-782e-49cc-a729-ebf2f94b2bb3\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fdm4x" Dec 
Dec 02 19:13:52 crc kubenswrapper[4792]: I1202 19:13:52.113092 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/c5e968fa-782e-49cc-a729-ebf2f94b2bb3-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fdm4x\" (UID: \"c5e968fa-782e-49cc-a729-ebf2f94b2bb3\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fdm4x"
Dec 02 19:13:52 crc kubenswrapper[4792]: I1202 19:13:52.113180 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2hw5x\" (UniqueName: \"kubernetes.io/projected/c5e968fa-782e-49cc-a729-ebf2f94b2bb3-kube-api-access-2hw5x\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fdm4x\" (UID: \"c5e968fa-782e-49cc-a729-ebf2f94b2bb3\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fdm4x"
Dec 02 19:13:52 crc kubenswrapper[4792]: I1202 19:13:52.113305 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c5e968fa-782e-49cc-a729-ebf2f94b2bb3-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fdm4x\" (UID: \"c5e968fa-782e-49cc-a729-ebf2f94b2bb3\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fdm4x"
Dec 02 19:13:52 crc kubenswrapper[4792]: I1202 19:13:52.113453 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c5e968fa-782e-49cc-a729-ebf2f94b2bb3-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fdm4x\" (UID: \"c5e968fa-782e-49cc-a729-ebf2f94b2bb3\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fdm4x"
Dec 02 19:13:52 crc kubenswrapper[4792]: I1202 19:13:52.113589 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5e968fa-782e-49cc-a729-ebf2f94b2bb3-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fdm4x\" (UID: \"c5e968fa-782e-49cc-a729-ebf2f94b2bb3\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fdm4x"
Dec 02 19:13:52 crc kubenswrapper[4792]: I1202 19:13:52.114360 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/c5e968fa-782e-49cc-a729-ebf2f94b2bb3-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fdm4x\" (UID: \"c5e968fa-782e-49cc-a729-ebf2f94b2bb3\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fdm4x"
Dec 02 19:13:52 crc kubenswrapper[4792]: I1202 19:13:52.120815 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c5e968fa-782e-49cc-a729-ebf2f94b2bb3-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fdm4x\" (UID: \"c5e968fa-782e-49cc-a729-ebf2f94b2bb3\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fdm4x"
Dec 02 19:13:52 crc kubenswrapper[4792]: I1202 19:13:52.122480 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5e968fa-782e-49cc-a729-ebf2f94b2bb3-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fdm4x\" (UID: \"c5e968fa-782e-49cc-a729-ebf2f94b2bb3\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fdm4x"
Dec 02 19:13:52 crc kubenswrapper[4792]: I1202 19:13:52.123846 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c5e968fa-782e-49cc-a729-ebf2f94b2bb3-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fdm4x\" (UID: \"c5e968fa-782e-49cc-a729-ebf2f94b2bb3\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fdm4x"
Dec 02 19:13:52 crc kubenswrapper[4792]: I1202 19:13:52.143417 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2hw5x\" (UniqueName: \"kubernetes.io/projected/c5e968fa-782e-49cc-a729-ebf2f94b2bb3-kube-api-access-2hw5x\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fdm4x\" (UID: \"c5e968fa-782e-49cc-a729-ebf2f94b2bb3\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fdm4x"
Dec 02 19:13:52 crc kubenswrapper[4792]: I1202 19:13:52.248381 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fdm4x"
Dec 02 19:13:52 crc kubenswrapper[4792]: I1202 19:13:52.540020 4792 scope.go:117] "RemoveContainer" containerID="3c67a741cfc6dd9fe0d54fd77123388dbf880be9adb881936a491df61309de1e"
Dec 02 19:13:52 crc kubenswrapper[4792]: E1202 19:13:52.540690 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f"
Dec 02 19:13:52 crc kubenswrapper[4792]: I1202 19:13:52.859026 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-fdm4x"]
Dec 02 19:13:53 crc kubenswrapper[4792]: I1202 19:13:53.837146 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fdm4x" event={"ID":"c5e968fa-782e-49cc-a729-ebf2f94b2bb3","Type":"ContainerStarted","Data":"f01e40540fd91b35bdb36de9a69a0ad368afc042fe1341d4748216c4c78325a1"}
Dec 02 19:13:53 crc kubenswrapper[4792]: I1202 19:13:53.837871 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fdm4x" event={"ID":"c5e968fa-782e-49cc-a729-ebf2f94b2bb3","Type":"ContainerStarted","Data":"cd876cd00fd6dbdbc5efda46bcf8d7ca304a16c6534a65073944b42d84a28f57"}
Dec 02 19:13:53 crc kubenswrapper[4792]: I1202 19:13:53.837323 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-zfw5p" podUID="4a1d762b-215f-4c70-8e5a-51d43604bf2f" containerName="registry-server" containerID="cri-o://bfce06ff04adadc2db3b3281c1ef3a305e3588380b8ed8cea6d6eaef6a808ad2" gracePeriod=2
Dec 02 19:13:53 crc kubenswrapper[4792]: I1202 19:13:53.874125 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fdm4x" podStartSLOduration=2.291271409 podStartE2EDuration="2.874104345s" podCreationTimestamp="2025-12-02 19:13:51 +0000 UTC" firstStartedPulling="2025-12-02 19:13:52.86218788 +0000 UTC m=+2263.635080218" lastFinishedPulling="2025-12-02 19:13:53.445020786 +0000 UTC m=+2264.217913154" observedRunningTime="2025-12-02 19:13:53.8620757 +0000 UTC m=+2264.634968028" watchObservedRunningTime="2025-12-02 19:13:53.874104345 +0000 UTC m=+2264.646996673"
Dec 02 19:13:54 crc kubenswrapper[4792]: I1202 19:13:54.421250 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zfw5p"
Dec 02 19:13:54 crc kubenswrapper[4792]: I1202 19:13:54.602000 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a1d762b-215f-4c70-8e5a-51d43604bf2f-catalog-content\") pod \"4a1d762b-215f-4c70-8e5a-51d43604bf2f\" (UID: \"4a1d762b-215f-4c70-8e5a-51d43604bf2f\") "
Dec 02 19:13:54 crc kubenswrapper[4792]: I1202 19:13:54.611880 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hxmtd\" (UniqueName: \"kubernetes.io/projected/4a1d762b-215f-4c70-8e5a-51d43604bf2f-kube-api-access-hxmtd\") pod \"4a1d762b-215f-4c70-8e5a-51d43604bf2f\" (UID: \"4a1d762b-215f-4c70-8e5a-51d43604bf2f\") "
Dec 02 19:13:54 crc kubenswrapper[4792]: I1202 19:13:54.612112 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a1d762b-215f-4c70-8e5a-51d43604bf2f-utilities\") pod \"4a1d762b-215f-4c70-8e5a-51d43604bf2f\" (UID: \"4a1d762b-215f-4c70-8e5a-51d43604bf2f\") "
Dec 02 19:13:54 crc kubenswrapper[4792]: I1202 19:13:54.614449 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a1d762b-215f-4c70-8e5a-51d43604bf2f-utilities" (OuterVolumeSpecName: "utilities") pod "4a1d762b-215f-4c70-8e5a-51d43604bf2f" (UID: "4a1d762b-215f-4c70-8e5a-51d43604bf2f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 19:13:54 crc kubenswrapper[4792]: I1202 19:13:54.632638 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a1d762b-215f-4c70-8e5a-51d43604bf2f-kube-api-access-hxmtd" (OuterVolumeSpecName: "kube-api-access-hxmtd") pod "4a1d762b-215f-4c70-8e5a-51d43604bf2f" (UID: "4a1d762b-215f-4c70-8e5a-51d43604bf2f"). InnerVolumeSpecName "kube-api-access-hxmtd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 19:13:54 crc kubenswrapper[4792]: I1202 19:13:54.685651 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a1d762b-215f-4c70-8e5a-51d43604bf2f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4a1d762b-215f-4c70-8e5a-51d43604bf2f" (UID: "4a1d762b-215f-4c70-8e5a-51d43604bf2f"). InnerVolumeSpecName "catalog-content".
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:13:54 crc kubenswrapper[4792]: I1202 19:13:54.715062 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a1d762b-215f-4c70-8e5a-51d43604bf2f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 19:13:54 crc kubenswrapper[4792]: I1202 19:13:54.715092 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hxmtd\" (UniqueName: \"kubernetes.io/projected/4a1d762b-215f-4c70-8e5a-51d43604bf2f-kube-api-access-hxmtd\") on node \"crc\" DevicePath \"\"" Dec 02 19:13:54 crc kubenswrapper[4792]: I1202 19:13:54.715106 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a1d762b-215f-4c70-8e5a-51d43604bf2f-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 19:13:54 crc kubenswrapper[4792]: I1202 19:13:54.851315 4792 generic.go:334] "Generic (PLEG): container finished" podID="4a1d762b-215f-4c70-8e5a-51d43604bf2f" containerID="bfce06ff04adadc2db3b3281c1ef3a305e3588380b8ed8cea6d6eaef6a808ad2" exitCode=0 Dec 02 19:13:54 crc kubenswrapper[4792]: I1202 19:13:54.851402 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zfw5p" event={"ID":"4a1d762b-215f-4c70-8e5a-51d43604bf2f","Type":"ContainerDied","Data":"bfce06ff04adadc2db3b3281c1ef3a305e3588380b8ed8cea6d6eaef6a808ad2"} Dec 02 19:13:54 crc kubenswrapper[4792]: I1202 19:13:54.851433 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zfw5p" Dec 02 19:13:54 crc kubenswrapper[4792]: I1202 19:13:54.851483 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zfw5p" event={"ID":"4a1d762b-215f-4c70-8e5a-51d43604bf2f","Type":"ContainerDied","Data":"f28f5e1b18d778aec39ac103e22c9f06a6636a4fa2bc843ce62fac454a1752f3"} Dec 02 19:13:54 crc kubenswrapper[4792]: I1202 19:13:54.851583 4792 scope.go:117] "RemoveContainer" containerID="bfce06ff04adadc2db3b3281c1ef3a305e3588380b8ed8cea6d6eaef6a808ad2" Dec 02 19:13:54 crc kubenswrapper[4792]: I1202 19:13:54.899672 4792 scope.go:117] "RemoveContainer" containerID="5acca9c4eb820355bd51f3cf0c18f833d1e33f62b3fef8bde83509318ef5c071" Dec 02 19:13:54 crc kubenswrapper[4792]: I1202 19:13:54.957190 4792 scope.go:117] "RemoveContainer" containerID="f78fc88333016f5c237939e71143623303afef980cd091f8d3a6bf62d40c9492" Dec 02 19:13:54 crc kubenswrapper[4792]: I1202 19:13:54.957298 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zfw5p"] Dec 02 19:13:54 crc kubenswrapper[4792]: I1202 19:13:54.971074 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-zfw5p"] Dec 02 19:13:55 crc kubenswrapper[4792]: I1202 19:13:55.022489 4792 scope.go:117] "RemoveContainer" containerID="bfce06ff04adadc2db3b3281c1ef3a305e3588380b8ed8cea6d6eaef6a808ad2" Dec 02 19:13:55 crc kubenswrapper[4792]: E1202 19:13:55.025842 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bfce06ff04adadc2db3b3281c1ef3a305e3588380b8ed8cea6d6eaef6a808ad2\": container with ID starting with bfce06ff04adadc2db3b3281c1ef3a305e3588380b8ed8cea6d6eaef6a808ad2 not found: ID does not exist" containerID="bfce06ff04adadc2db3b3281c1ef3a305e3588380b8ed8cea6d6eaef6a808ad2" Dec 02 19:13:55 crc kubenswrapper[4792]: I1202 19:13:55.025881 
4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bfce06ff04adadc2db3b3281c1ef3a305e3588380b8ed8cea6d6eaef6a808ad2"} err="failed to get container status \"bfce06ff04adadc2db3b3281c1ef3a305e3588380b8ed8cea6d6eaef6a808ad2\": rpc error: code = NotFound desc = could not find container \"bfce06ff04adadc2db3b3281c1ef3a305e3588380b8ed8cea6d6eaef6a808ad2\": container with ID starting with bfce06ff04adadc2db3b3281c1ef3a305e3588380b8ed8cea6d6eaef6a808ad2 not found: ID does not exist" Dec 02 19:13:55 crc kubenswrapper[4792]: I1202 19:13:55.025907 4792 scope.go:117] "RemoveContainer" containerID="5acca9c4eb820355bd51f3cf0c18f833d1e33f62b3fef8bde83509318ef5c071" Dec 02 19:13:55 crc kubenswrapper[4792]: E1202 19:13:55.029625 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5acca9c4eb820355bd51f3cf0c18f833d1e33f62b3fef8bde83509318ef5c071\": container with ID starting with 5acca9c4eb820355bd51f3cf0c18f833d1e33f62b3fef8bde83509318ef5c071 not found: ID does not exist" containerID="5acca9c4eb820355bd51f3cf0c18f833d1e33f62b3fef8bde83509318ef5c071" Dec 02 19:13:55 crc kubenswrapper[4792]: I1202 19:13:55.029647 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5acca9c4eb820355bd51f3cf0c18f833d1e33f62b3fef8bde83509318ef5c071"} err="failed to get container status \"5acca9c4eb820355bd51f3cf0c18f833d1e33f62b3fef8bde83509318ef5c071\": rpc error: code = NotFound desc = could not find container \"5acca9c4eb820355bd51f3cf0c18f833d1e33f62b3fef8bde83509318ef5c071\": container with ID starting with 5acca9c4eb820355bd51f3cf0c18f833d1e33f62b3fef8bde83509318ef5c071 not found: ID does not exist" Dec 02 19:13:55 crc kubenswrapper[4792]: I1202 19:13:55.029662 4792 scope.go:117] "RemoveContainer" containerID="f78fc88333016f5c237939e71143623303afef980cd091f8d3a6bf62d40c9492" Dec 02 19:13:55 crc kubenswrapper[4792]: E1202 19:13:55.033588 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f78fc88333016f5c237939e71143623303afef980cd091f8d3a6bf62d40c9492\": container with ID starting with f78fc88333016f5c237939e71143623303afef980cd091f8d3a6bf62d40c9492 not found: ID does not exist" containerID="f78fc88333016f5c237939e71143623303afef980cd091f8d3a6bf62d40c9492" Dec 02 19:13:55 crc kubenswrapper[4792]: I1202 19:13:55.033609 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f78fc88333016f5c237939e71143623303afef980cd091f8d3a6bf62d40c9492"} err="failed to get container status \"f78fc88333016f5c237939e71143623303afef980cd091f8d3a6bf62d40c9492\": rpc error: code = NotFound desc = could not find container \"f78fc88333016f5c237939e71143623303afef980cd091f8d3a6bf62d40c9492\": container with ID starting with f78fc88333016f5c237939e71143623303afef980cd091f8d3a6bf62d40c9492 not found: ID does not exist" Dec 02 19:13:55 crc kubenswrapper[4792]: I1202 19:13:55.559732 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a1d762b-215f-4c70-8e5a-51d43604bf2f" path="/var/lib/kubelet/pods/4a1d762b-215f-4c70-8e5a-51d43604bf2f/volumes" Dec 02 19:14:06 crc kubenswrapper[4792]: I1202 19:14:06.539771 4792 scope.go:117] "RemoveContainer" containerID="3c67a741cfc6dd9fe0d54fd77123388dbf880be9adb881936a491df61309de1e" Dec 02 19:14:06 crc kubenswrapper[4792]: E1202 19:14:06.542612 4792 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:14:21 crc kubenswrapper[4792]: I1202 19:14:21.540288 4792 scope.go:117] "RemoveContainer" containerID="3c67a741cfc6dd9fe0d54fd77123388dbf880be9adb881936a491df61309de1e" Dec 02 19:14:21 crc kubenswrapper[4792]: E1202 19:14:21.540992 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:14:33 crc kubenswrapper[4792]: I1202 19:14:33.194632 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-fvc9f"] Dec 02 19:14:33 crc kubenswrapper[4792]: E1202 19:14:33.195985 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a1d762b-215f-4c70-8e5a-51d43604bf2f" containerName="extract-utilities" Dec 02 19:14:33 crc kubenswrapper[4792]: I1202 19:14:33.196010 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a1d762b-215f-4c70-8e5a-51d43604bf2f" containerName="extract-utilities" Dec 02 19:14:33 crc kubenswrapper[4792]: E1202 19:14:33.196054 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a1d762b-215f-4c70-8e5a-51d43604bf2f" containerName="extract-content" Dec 02 19:14:33 crc kubenswrapper[4792]: I1202 19:14:33.196070 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a1d762b-215f-4c70-8e5a-51d43604bf2f" containerName="extract-content" Dec 02 19:14:33 crc kubenswrapper[4792]: E1202 19:14:33.196108 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a1d762b-215f-4c70-8e5a-51d43604bf2f" containerName="registry-server" Dec 02 19:14:33 crc kubenswrapper[4792]: I1202 19:14:33.196121 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a1d762b-215f-4c70-8e5a-51d43604bf2f" containerName="registry-server" Dec 02 19:14:33 crc kubenswrapper[4792]: I1202 19:14:33.196454 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a1d762b-215f-4c70-8e5a-51d43604bf2f" containerName="registry-server" Dec 02 19:14:33 crc kubenswrapper[4792]: I1202 19:14:33.199252 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fvc9f" Dec 02 19:14:33 crc kubenswrapper[4792]: I1202 19:14:33.237752 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fvc9f"] Dec 02 19:14:33 crc kubenswrapper[4792]: I1202 19:14:33.260483 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/157f4c86-9a9c-4190-a2e7-434d575b5b4e-catalog-content\") pod \"redhat-marketplace-fvc9f\" (UID: \"157f4c86-9a9c-4190-a2e7-434d575b5b4e\") " pod="openshift-marketplace/redhat-marketplace-fvc9f" Dec 02 19:14:33 crc kubenswrapper[4792]: I1202 19:14:33.260689 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/157f4c86-9a9c-4190-a2e7-434d575b5b4e-utilities\") pod \"redhat-marketplace-fvc9f\" (UID: \"157f4c86-9a9c-4190-a2e7-434d575b5b4e\") " pod="openshift-marketplace/redhat-marketplace-fvc9f" Dec 02 19:14:33 crc kubenswrapper[4792]: I1202 19:14:33.260716 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-99sxv\" (UniqueName: \"kubernetes.io/projected/157f4c86-9a9c-4190-a2e7-434d575b5b4e-kube-api-access-99sxv\") pod \"redhat-marketplace-fvc9f\" (UID: \"157f4c86-9a9c-4190-a2e7-434d575b5b4e\") " pod="openshift-marketplace/redhat-marketplace-fvc9f" Dec 02 19:14:33 crc kubenswrapper[4792]: I1202 19:14:33.362442 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/157f4c86-9a9c-4190-a2e7-434d575b5b4e-catalog-content\") pod \"redhat-marketplace-fvc9f\" (UID: \"157f4c86-9a9c-4190-a2e7-434d575b5b4e\") " pod="openshift-marketplace/redhat-marketplace-fvc9f" Dec 02 19:14:33 crc kubenswrapper[4792]: I1202 19:14:33.362656 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/157f4c86-9a9c-4190-a2e7-434d575b5b4e-utilities\") pod \"redhat-marketplace-fvc9f\" (UID: \"157f4c86-9a9c-4190-a2e7-434d575b5b4e\") " pod="openshift-marketplace/redhat-marketplace-fvc9f" Dec 02 19:14:33 crc kubenswrapper[4792]: I1202 19:14:33.362694 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-99sxv\" (UniqueName: \"kubernetes.io/projected/157f4c86-9a9c-4190-a2e7-434d575b5b4e-kube-api-access-99sxv\") pod \"redhat-marketplace-fvc9f\" (UID: \"157f4c86-9a9c-4190-a2e7-434d575b5b4e\") " pod="openshift-marketplace/redhat-marketplace-fvc9f" Dec 02 19:14:33 crc kubenswrapper[4792]: I1202 19:14:33.363014 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/157f4c86-9a9c-4190-a2e7-434d575b5b4e-catalog-content\") pod \"redhat-marketplace-fvc9f\" (UID: \"157f4c86-9a9c-4190-a2e7-434d575b5b4e\") " pod="openshift-marketplace/redhat-marketplace-fvc9f" Dec 02 19:14:33 crc kubenswrapper[4792]: I1202 19:14:33.363092 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/157f4c86-9a9c-4190-a2e7-434d575b5b4e-utilities\") pod \"redhat-marketplace-fvc9f\" (UID: \"157f4c86-9a9c-4190-a2e7-434d575b5b4e\") " pod="openshift-marketplace/redhat-marketplace-fvc9f" Dec 02 19:14:33 crc kubenswrapper[4792]: I1202 19:14:33.392333 4792 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-99sxv\" (UniqueName: \"kubernetes.io/projected/157f4c86-9a9c-4190-a2e7-434d575b5b4e-kube-api-access-99sxv\") pod \"redhat-marketplace-fvc9f\" (UID: \"157f4c86-9a9c-4190-a2e7-434d575b5b4e\") " pod="openshift-marketplace/redhat-marketplace-fvc9f" Dec 02 19:14:33 crc kubenswrapper[4792]: I1202 19:14:33.537626 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fvc9f" Dec 02 19:14:33 crc kubenswrapper[4792]: I1202 19:14:33.540403 4792 scope.go:117] "RemoveContainer" containerID="3c67a741cfc6dd9fe0d54fd77123388dbf880be9adb881936a491df61309de1e" Dec 02 19:14:33 crc kubenswrapper[4792]: E1202 19:14:33.540694 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:14:34 crc kubenswrapper[4792]: I1202 19:14:34.087738 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fvc9f"] Dec 02 19:14:34 crc kubenswrapper[4792]: I1202 19:14:34.778183 4792 generic.go:334] "Generic (PLEG): container finished" podID="157f4c86-9a9c-4190-a2e7-434d575b5b4e" containerID="0e1441c2bac8804e082166655c1cf212a0730758faec7c6ba428ecf46e529b53" exitCode=0 Dec 02 19:14:34 crc kubenswrapper[4792]: I1202 19:14:34.778243 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fvc9f" event={"ID":"157f4c86-9a9c-4190-a2e7-434d575b5b4e","Type":"ContainerDied","Data":"0e1441c2bac8804e082166655c1cf212a0730758faec7c6ba428ecf46e529b53"} Dec 02 19:14:34 crc kubenswrapper[4792]: I1202 19:14:34.778630 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fvc9f" event={"ID":"157f4c86-9a9c-4190-a2e7-434d575b5b4e","Type":"ContainerStarted","Data":"21e54874322f57a541d3de46b8cbbaf5bd9c628444745130aad9e4e05c97a428"} Dec 02 19:14:34 crc kubenswrapper[4792]: I1202 19:14:34.781364 4792 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 19:14:36 crc kubenswrapper[4792]: I1202 19:14:36.822287 4792 generic.go:334] "Generic (PLEG): container finished" podID="157f4c86-9a9c-4190-a2e7-434d575b5b4e" containerID="28c66b59147447728b2cd2efa521b1ffb5379adcc4584c21ec3994d05aa0eb88" exitCode=0 Dec 02 19:14:36 crc kubenswrapper[4792]: I1202 19:14:36.822382 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fvc9f" event={"ID":"157f4c86-9a9c-4190-a2e7-434d575b5b4e","Type":"ContainerDied","Data":"28c66b59147447728b2cd2efa521b1ffb5379adcc4584c21ec3994d05aa0eb88"} Dec 02 19:14:37 crc kubenswrapper[4792]: I1202 19:14:37.840228 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fvc9f" event={"ID":"157f4c86-9a9c-4190-a2e7-434d575b5b4e","Type":"ContainerStarted","Data":"ff324904c01ccc552ac661151cceef8c0bf670320f12e37a112f6147d7b0fd7e"} Dec 02 19:14:37 crc kubenswrapper[4792]: I1202 19:14:37.870450 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-fvc9f" podStartSLOduration=2.271200259 podStartE2EDuration="4.870427404s" 
podCreationTimestamp="2025-12-02 19:14:33 +0000 UTC" firstStartedPulling="2025-12-02 19:14:34.781142714 +0000 UTC m=+2305.554035042" lastFinishedPulling="2025-12-02 19:14:37.380369859 +0000 UTC m=+2308.153262187" observedRunningTime="2025-12-02 19:14:37.864362005 +0000 UTC m=+2308.637254373" watchObservedRunningTime="2025-12-02 19:14:37.870427404 +0000 UTC m=+2308.643319722" Dec 02 19:14:43 crc kubenswrapper[4792]: I1202 19:14:43.538781 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-fvc9f" Dec 02 19:14:43 crc kubenswrapper[4792]: I1202 19:14:43.539549 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-fvc9f" Dec 02 19:14:43 crc kubenswrapper[4792]: I1202 19:14:43.625458 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-fvc9f" Dec 02 19:14:43 crc kubenswrapper[4792]: I1202 19:14:43.982273 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-fvc9f" Dec 02 19:14:44 crc kubenswrapper[4792]: I1202 19:14:44.096692 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fvc9f"] Dec 02 19:14:45 crc kubenswrapper[4792]: I1202 19:14:45.952120 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-fvc9f" podUID="157f4c86-9a9c-4190-a2e7-434d575b5b4e" containerName="registry-server" containerID="cri-o://ff324904c01ccc552ac661151cceef8c0bf670320f12e37a112f6147d7b0fd7e" gracePeriod=2 Dec 02 19:14:46 crc kubenswrapper[4792]: I1202 19:14:46.623270 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fvc9f" Dec 02 19:14:46 crc kubenswrapper[4792]: I1202 19:14:46.717875 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-99sxv\" (UniqueName: \"kubernetes.io/projected/157f4c86-9a9c-4190-a2e7-434d575b5b4e-kube-api-access-99sxv\") pod \"157f4c86-9a9c-4190-a2e7-434d575b5b4e\" (UID: \"157f4c86-9a9c-4190-a2e7-434d575b5b4e\") " Dec 02 19:14:46 crc kubenswrapper[4792]: I1202 19:14:46.717942 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/157f4c86-9a9c-4190-a2e7-434d575b5b4e-catalog-content\") pod \"157f4c86-9a9c-4190-a2e7-434d575b5b4e\" (UID: \"157f4c86-9a9c-4190-a2e7-434d575b5b4e\") " Dec 02 19:14:46 crc kubenswrapper[4792]: I1202 19:14:46.718066 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/157f4c86-9a9c-4190-a2e7-434d575b5b4e-utilities\") pod \"157f4c86-9a9c-4190-a2e7-434d575b5b4e\" (UID: \"157f4c86-9a9c-4190-a2e7-434d575b5b4e\") " Dec 02 19:14:46 crc kubenswrapper[4792]: I1202 19:14:46.719073 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/157f4c86-9a9c-4190-a2e7-434d575b5b4e-utilities" (OuterVolumeSpecName: "utilities") pod "157f4c86-9a9c-4190-a2e7-434d575b5b4e" (UID: "157f4c86-9a9c-4190-a2e7-434d575b5b4e"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:14:46 crc kubenswrapper[4792]: I1202 19:14:46.723693 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/157f4c86-9a9c-4190-a2e7-434d575b5b4e-kube-api-access-99sxv" (OuterVolumeSpecName: "kube-api-access-99sxv") pod "157f4c86-9a9c-4190-a2e7-434d575b5b4e" (UID: "157f4c86-9a9c-4190-a2e7-434d575b5b4e"). InnerVolumeSpecName "kube-api-access-99sxv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:14:46 crc kubenswrapper[4792]: I1202 19:14:46.753492 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/157f4c86-9a9c-4190-a2e7-434d575b5b4e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "157f4c86-9a9c-4190-a2e7-434d575b5b4e" (UID: "157f4c86-9a9c-4190-a2e7-434d575b5b4e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:14:46 crc kubenswrapper[4792]: I1202 19:14:46.821169 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-99sxv\" (UniqueName: \"kubernetes.io/projected/157f4c86-9a9c-4190-a2e7-434d575b5b4e-kube-api-access-99sxv\") on node \"crc\" DevicePath \"\"" Dec 02 19:14:46 crc kubenswrapper[4792]: I1202 19:14:46.821236 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/157f4c86-9a9c-4190-a2e7-434d575b5b4e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 19:14:46 crc kubenswrapper[4792]: I1202 19:14:46.821249 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/157f4c86-9a9c-4190-a2e7-434d575b5b4e-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 19:14:46 crc kubenswrapper[4792]: I1202 19:14:46.965715 4792 generic.go:334] "Generic (PLEG): container finished" podID="157f4c86-9a9c-4190-a2e7-434d575b5b4e" containerID="ff324904c01ccc552ac661151cceef8c0bf670320f12e37a112f6147d7b0fd7e" exitCode=0 Dec 02 19:14:46 crc kubenswrapper[4792]: I1202 19:14:46.965780 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fvc9f" event={"ID":"157f4c86-9a9c-4190-a2e7-434d575b5b4e","Type":"ContainerDied","Data":"ff324904c01ccc552ac661151cceef8c0bf670320f12e37a112f6147d7b0fd7e"} Dec 02 19:14:46 crc kubenswrapper[4792]: I1202 19:14:46.965832 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fvc9f" event={"ID":"157f4c86-9a9c-4190-a2e7-434d575b5b4e","Type":"ContainerDied","Data":"21e54874322f57a541d3de46b8cbbaf5bd9c628444745130aad9e4e05c97a428"} Dec 02 19:14:46 crc kubenswrapper[4792]: I1202 19:14:46.965863 4792 scope.go:117] "RemoveContainer" containerID="ff324904c01ccc552ac661151cceef8c0bf670320f12e37a112f6147d7b0fd7e" Dec 02 19:14:46 crc kubenswrapper[4792]: I1202 19:14:46.965782 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fvc9f" Dec 02 19:14:47 crc kubenswrapper[4792]: I1202 19:14:47.010396 4792 scope.go:117] "RemoveContainer" containerID="28c66b59147447728b2cd2efa521b1ffb5379adcc4584c21ec3994d05aa0eb88" Dec 02 19:14:47 crc kubenswrapper[4792]: I1202 19:14:47.015396 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fvc9f"] Dec 02 19:14:47 crc kubenswrapper[4792]: I1202 19:14:47.036009 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-fvc9f"] Dec 02 19:14:47 crc kubenswrapper[4792]: I1202 19:14:47.040420 4792 scope.go:117] "RemoveContainer" containerID="0e1441c2bac8804e082166655c1cf212a0730758faec7c6ba428ecf46e529b53" Dec 02 19:14:47 crc kubenswrapper[4792]: I1202 19:14:47.120042 4792 scope.go:117] "RemoveContainer" containerID="ff324904c01ccc552ac661151cceef8c0bf670320f12e37a112f6147d7b0fd7e" Dec 02 19:14:47 crc kubenswrapper[4792]: E1202 19:14:47.120586 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ff324904c01ccc552ac661151cceef8c0bf670320f12e37a112f6147d7b0fd7e\": container with ID starting with ff324904c01ccc552ac661151cceef8c0bf670320f12e37a112f6147d7b0fd7e not found: ID does not exist" containerID="ff324904c01ccc552ac661151cceef8c0bf670320f12e37a112f6147d7b0fd7e" Dec 02 19:14:47 crc kubenswrapper[4792]: I1202 19:14:47.120623 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff324904c01ccc552ac661151cceef8c0bf670320f12e37a112f6147d7b0fd7e"} err="failed to get container status \"ff324904c01ccc552ac661151cceef8c0bf670320f12e37a112f6147d7b0fd7e\": rpc error: code = NotFound desc = could not find container \"ff324904c01ccc552ac661151cceef8c0bf670320f12e37a112f6147d7b0fd7e\": container with ID starting with ff324904c01ccc552ac661151cceef8c0bf670320f12e37a112f6147d7b0fd7e not found: ID does not exist" Dec 02 19:14:47 crc kubenswrapper[4792]: I1202 19:14:47.120648 4792 scope.go:117] "RemoveContainer" containerID="28c66b59147447728b2cd2efa521b1ffb5379adcc4584c21ec3994d05aa0eb88" Dec 02 19:14:47 crc kubenswrapper[4792]: E1202 19:14:47.121509 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28c66b59147447728b2cd2efa521b1ffb5379adcc4584c21ec3994d05aa0eb88\": container with ID starting with 28c66b59147447728b2cd2efa521b1ffb5379adcc4584c21ec3994d05aa0eb88 not found: ID does not exist" containerID="28c66b59147447728b2cd2efa521b1ffb5379adcc4584c21ec3994d05aa0eb88" Dec 02 19:14:47 crc kubenswrapper[4792]: I1202 19:14:47.121586 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28c66b59147447728b2cd2efa521b1ffb5379adcc4584c21ec3994d05aa0eb88"} err="failed to get container status \"28c66b59147447728b2cd2efa521b1ffb5379adcc4584c21ec3994d05aa0eb88\": rpc error: code = NotFound desc = could not find container \"28c66b59147447728b2cd2efa521b1ffb5379adcc4584c21ec3994d05aa0eb88\": container with ID starting with 28c66b59147447728b2cd2efa521b1ffb5379adcc4584c21ec3994d05aa0eb88 not found: ID does not exist" Dec 02 19:14:47 crc kubenswrapper[4792]: I1202 19:14:47.121625 4792 scope.go:117] "RemoveContainer" containerID="0e1441c2bac8804e082166655c1cf212a0730758faec7c6ba428ecf46e529b53" Dec 02 19:14:47 crc kubenswrapper[4792]: E1202 19:14:47.122112 4792 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"0e1441c2bac8804e082166655c1cf212a0730758faec7c6ba428ecf46e529b53\": container with ID starting with 0e1441c2bac8804e082166655c1cf212a0730758faec7c6ba428ecf46e529b53 not found: ID does not exist" containerID="0e1441c2bac8804e082166655c1cf212a0730758faec7c6ba428ecf46e529b53" Dec 02 19:14:47 crc kubenswrapper[4792]: I1202 19:14:47.122160 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e1441c2bac8804e082166655c1cf212a0730758faec7c6ba428ecf46e529b53"} err="failed to get container status \"0e1441c2bac8804e082166655c1cf212a0730758faec7c6ba428ecf46e529b53\": rpc error: code = NotFound desc = could not find container \"0e1441c2bac8804e082166655c1cf212a0730758faec7c6ba428ecf46e529b53\": container with ID starting with 0e1441c2bac8804e082166655c1cf212a0730758faec7c6ba428ecf46e529b53 not found: ID does not exist" Dec 02 19:14:47 crc kubenswrapper[4792]: I1202 19:14:47.539924 4792 scope.go:117] "RemoveContainer" containerID="3c67a741cfc6dd9fe0d54fd77123388dbf880be9adb881936a491df61309de1e" Dec 02 19:14:47 crc kubenswrapper[4792]: E1202 19:14:47.540839 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:14:47 crc kubenswrapper[4792]: I1202 19:14:47.561146 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="157f4c86-9a9c-4190-a2e7-434d575b5b4e" path="/var/lib/kubelet/pods/157f4c86-9a9c-4190-a2e7-434d575b5b4e/volumes" Dec 02 19:15:00 crc kubenswrapper[4792]: I1202 19:15:00.188072 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411715-tk7xr"] Dec 02 19:15:00 crc kubenswrapper[4792]: E1202 19:15:00.189297 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="157f4c86-9a9c-4190-a2e7-434d575b5b4e" containerName="extract-content" Dec 02 19:15:00 crc kubenswrapper[4792]: I1202 19:15:00.189319 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="157f4c86-9a9c-4190-a2e7-434d575b5b4e" containerName="extract-content" Dec 02 19:15:00 crc kubenswrapper[4792]: E1202 19:15:00.189344 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="157f4c86-9a9c-4190-a2e7-434d575b5b4e" containerName="extract-utilities" Dec 02 19:15:00 crc kubenswrapper[4792]: I1202 19:15:00.189358 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="157f4c86-9a9c-4190-a2e7-434d575b5b4e" containerName="extract-utilities" Dec 02 19:15:00 crc kubenswrapper[4792]: E1202 19:15:00.189381 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="157f4c86-9a9c-4190-a2e7-434d575b5b4e" containerName="registry-server" Dec 02 19:15:00 crc kubenswrapper[4792]: I1202 19:15:00.189393 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="157f4c86-9a9c-4190-a2e7-434d575b5b4e" containerName="registry-server" Dec 02 19:15:00 crc kubenswrapper[4792]: I1202 19:15:00.189760 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="157f4c86-9a9c-4190-a2e7-434d575b5b4e" containerName="registry-server" Dec 02 19:15:00 crc kubenswrapper[4792]: I1202 19:15:00.190992 4792 util.go:30] "No sandbox for pod 
can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411715-tk7xr" Dec 02 19:15:00 crc kubenswrapper[4792]: I1202 19:15:00.192716 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411715-tk7xr"] Dec 02 19:15:00 crc kubenswrapper[4792]: I1202 19:15:00.198986 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 02 19:15:00 crc kubenswrapper[4792]: I1202 19:15:00.201708 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 02 19:15:00 crc kubenswrapper[4792]: I1202 19:15:00.302798 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/98e61c09-7836-4f56-9dcd-229acd29ef98-secret-volume\") pod \"collect-profiles-29411715-tk7xr\" (UID: \"98e61c09-7836-4f56-9dcd-229acd29ef98\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411715-tk7xr" Dec 02 19:15:00 crc kubenswrapper[4792]: I1202 19:15:00.303153 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/98e61c09-7836-4f56-9dcd-229acd29ef98-config-volume\") pod \"collect-profiles-29411715-tk7xr\" (UID: \"98e61c09-7836-4f56-9dcd-229acd29ef98\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411715-tk7xr" Dec 02 19:15:00 crc kubenswrapper[4792]: I1202 19:15:00.303258 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tz8fh\" (UniqueName: \"kubernetes.io/projected/98e61c09-7836-4f56-9dcd-229acd29ef98-kube-api-access-tz8fh\") pod \"collect-profiles-29411715-tk7xr\" (UID: \"98e61c09-7836-4f56-9dcd-229acd29ef98\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411715-tk7xr" Dec 02 19:15:00 crc kubenswrapper[4792]: I1202 19:15:00.406138 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/98e61c09-7836-4f56-9dcd-229acd29ef98-secret-volume\") pod \"collect-profiles-29411715-tk7xr\" (UID: \"98e61c09-7836-4f56-9dcd-229acd29ef98\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411715-tk7xr" Dec 02 19:15:00 crc kubenswrapper[4792]: I1202 19:15:00.406231 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/98e61c09-7836-4f56-9dcd-229acd29ef98-config-volume\") pod \"collect-profiles-29411715-tk7xr\" (UID: \"98e61c09-7836-4f56-9dcd-229acd29ef98\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411715-tk7xr" Dec 02 19:15:00 crc kubenswrapper[4792]: I1202 19:15:00.406340 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tz8fh\" (UniqueName: \"kubernetes.io/projected/98e61c09-7836-4f56-9dcd-229acd29ef98-kube-api-access-tz8fh\") pod \"collect-profiles-29411715-tk7xr\" (UID: \"98e61c09-7836-4f56-9dcd-229acd29ef98\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411715-tk7xr" Dec 02 19:15:00 crc kubenswrapper[4792]: I1202 19:15:00.407164 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/configmap/98e61c09-7836-4f56-9dcd-229acd29ef98-config-volume\") pod \"collect-profiles-29411715-tk7xr\" (UID: \"98e61c09-7836-4f56-9dcd-229acd29ef98\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411715-tk7xr" Dec 02 19:15:00 crc kubenswrapper[4792]: I1202 19:15:00.422911 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/98e61c09-7836-4f56-9dcd-229acd29ef98-secret-volume\") pod \"collect-profiles-29411715-tk7xr\" (UID: \"98e61c09-7836-4f56-9dcd-229acd29ef98\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411715-tk7xr" Dec 02 19:15:00 crc kubenswrapper[4792]: I1202 19:15:00.423101 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tz8fh\" (UniqueName: \"kubernetes.io/projected/98e61c09-7836-4f56-9dcd-229acd29ef98-kube-api-access-tz8fh\") pod \"collect-profiles-29411715-tk7xr\" (UID: \"98e61c09-7836-4f56-9dcd-229acd29ef98\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411715-tk7xr" Dec 02 19:15:00 crc kubenswrapper[4792]: I1202 19:15:00.522297 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411715-tk7xr" Dec 02 19:15:01 crc kubenswrapper[4792]: I1202 19:15:01.093644 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411715-tk7xr"] Dec 02 19:15:01 crc kubenswrapper[4792]: W1202 19:15:01.101449 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod98e61c09_7836_4f56_9dcd_229acd29ef98.slice/crio-2c8fb759aaa3bee100f3d089c693b1d9a35efa506efa2f5d11302f1f20be6de2 WatchSource:0}: Error finding container 2c8fb759aaa3bee100f3d089c693b1d9a35efa506efa2f5d11302f1f20be6de2: Status 404 returned error can't find the container with id 2c8fb759aaa3bee100f3d089c693b1d9a35efa506efa2f5d11302f1f20be6de2 Dec 02 19:15:01 crc kubenswrapper[4792]: I1202 19:15:01.147456 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411715-tk7xr" event={"ID":"98e61c09-7836-4f56-9dcd-229acd29ef98","Type":"ContainerStarted","Data":"2c8fb759aaa3bee100f3d089c693b1d9a35efa506efa2f5d11302f1f20be6de2"} Dec 02 19:15:01 crc kubenswrapper[4792]: I1202 19:15:01.540419 4792 scope.go:117] "RemoveContainer" containerID="3c67a741cfc6dd9fe0d54fd77123388dbf880be9adb881936a491df61309de1e" Dec 02 19:15:01 crc kubenswrapper[4792]: E1202 19:15:01.541747 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:15:02 crc kubenswrapper[4792]: I1202 19:15:02.165767 4792 generic.go:334] "Generic (PLEG): container finished" podID="98e61c09-7836-4f56-9dcd-229acd29ef98" containerID="3cc79bbabe9e2bceb13e27028a9992b405c01e2f215c1f79c30fb9406115d2ce" exitCode=0 Dec 02 19:15:02 crc kubenswrapper[4792]: I1202 19:15:02.165834 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411715-tk7xr" 
event={"ID":"98e61c09-7836-4f56-9dcd-229acd29ef98","Type":"ContainerDied","Data":"3cc79bbabe9e2bceb13e27028a9992b405c01e2f215c1f79c30fb9406115d2ce"} Dec 02 19:15:03 crc kubenswrapper[4792]: I1202 19:15:03.585912 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411715-tk7xr" Dec 02 19:15:03 crc kubenswrapper[4792]: I1202 19:15:03.682274 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/98e61c09-7836-4f56-9dcd-229acd29ef98-config-volume\") pod \"98e61c09-7836-4f56-9dcd-229acd29ef98\" (UID: \"98e61c09-7836-4f56-9dcd-229acd29ef98\") " Dec 02 19:15:03 crc kubenswrapper[4792]: I1202 19:15:03.682515 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/98e61c09-7836-4f56-9dcd-229acd29ef98-secret-volume\") pod \"98e61c09-7836-4f56-9dcd-229acd29ef98\" (UID: \"98e61c09-7836-4f56-9dcd-229acd29ef98\") " Dec 02 19:15:03 crc kubenswrapper[4792]: I1202 19:15:03.682649 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tz8fh\" (UniqueName: \"kubernetes.io/projected/98e61c09-7836-4f56-9dcd-229acd29ef98-kube-api-access-tz8fh\") pod \"98e61c09-7836-4f56-9dcd-229acd29ef98\" (UID: \"98e61c09-7836-4f56-9dcd-229acd29ef98\") " Dec 02 19:15:03 crc kubenswrapper[4792]: I1202 19:15:03.683229 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/98e61c09-7836-4f56-9dcd-229acd29ef98-config-volume" (OuterVolumeSpecName: "config-volume") pod "98e61c09-7836-4f56-9dcd-229acd29ef98" (UID: "98e61c09-7836-4f56-9dcd-229acd29ef98"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 19:15:03 crc kubenswrapper[4792]: I1202 19:15:03.684069 4792 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/98e61c09-7836-4f56-9dcd-229acd29ef98-config-volume\") on node \"crc\" DevicePath \"\"" Dec 02 19:15:03 crc kubenswrapper[4792]: I1202 19:15:03.688651 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98e61c09-7836-4f56-9dcd-229acd29ef98-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "98e61c09-7836-4f56-9dcd-229acd29ef98" (UID: "98e61c09-7836-4f56-9dcd-229acd29ef98"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:15:03 crc kubenswrapper[4792]: I1202 19:15:03.689945 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/98e61c09-7836-4f56-9dcd-229acd29ef98-kube-api-access-tz8fh" (OuterVolumeSpecName: "kube-api-access-tz8fh") pod "98e61c09-7836-4f56-9dcd-229acd29ef98" (UID: "98e61c09-7836-4f56-9dcd-229acd29ef98"). InnerVolumeSpecName "kube-api-access-tz8fh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:15:03 crc kubenswrapper[4792]: I1202 19:15:03.786261 4792 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/98e61c09-7836-4f56-9dcd-229acd29ef98-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 02 19:15:03 crc kubenswrapper[4792]: I1202 19:15:03.786318 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tz8fh\" (UniqueName: \"kubernetes.io/projected/98e61c09-7836-4f56-9dcd-229acd29ef98-kube-api-access-tz8fh\") on node \"crc\" DevicePath \"\"" Dec 02 19:15:04 crc kubenswrapper[4792]: I1202 19:15:04.189797 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411715-tk7xr" event={"ID":"98e61c09-7836-4f56-9dcd-229acd29ef98","Type":"ContainerDied","Data":"2c8fb759aaa3bee100f3d089c693b1d9a35efa506efa2f5d11302f1f20be6de2"} Dec 02 19:15:04 crc kubenswrapper[4792]: I1202 19:15:04.190146 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2c8fb759aaa3bee100f3d089c693b1d9a35efa506efa2f5d11302f1f20be6de2" Dec 02 19:15:04 crc kubenswrapper[4792]: I1202 19:15:04.189856 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411715-tk7xr" Dec 02 19:15:04 crc kubenswrapper[4792]: I1202 19:15:04.675657 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411670-9n2pw"] Dec 02 19:15:04 crc kubenswrapper[4792]: I1202 19:15:04.685917 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411670-9n2pw"] Dec 02 19:15:05 crc kubenswrapper[4792]: I1202 19:15:05.581868 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64e0e779-21ef-489e-8721-54533e24bf31" path="/var/lib/kubelet/pods/64e0e779-21ef-489e-8721-54533e24bf31/volumes" Dec 02 19:15:11 crc kubenswrapper[4792]: I1202 19:15:11.282627 4792 generic.go:334] "Generic (PLEG): container finished" podID="c5e968fa-782e-49cc-a729-ebf2f94b2bb3" containerID="f01e40540fd91b35bdb36de9a69a0ad368afc042fe1341d4748216c4c78325a1" exitCode=0 Dec 02 19:15:11 crc kubenswrapper[4792]: I1202 19:15:11.282808 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fdm4x" event={"ID":"c5e968fa-782e-49cc-a729-ebf2f94b2bb3","Type":"ContainerDied","Data":"f01e40540fd91b35bdb36de9a69a0ad368afc042fe1341d4748216c4c78325a1"} Dec 02 19:15:12 crc kubenswrapper[4792]: I1202 19:15:12.955587 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fdm4x" Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.017443 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c5e968fa-782e-49cc-a729-ebf2f94b2bb3-ssh-key\") pod \"c5e968fa-782e-49cc-a729-ebf2f94b2bb3\" (UID: \"c5e968fa-782e-49cc-a729-ebf2f94b2bb3\") " Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.017556 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c5e968fa-782e-49cc-a729-ebf2f94b2bb3-inventory\") pod \"c5e968fa-782e-49cc-a729-ebf2f94b2bb3\" (UID: \"c5e968fa-782e-49cc-a729-ebf2f94b2bb3\") " Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.017606 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/c5e968fa-782e-49cc-a729-ebf2f94b2bb3-ovncontroller-config-0\") pod \"c5e968fa-782e-49cc-a729-ebf2f94b2bb3\" (UID: \"c5e968fa-782e-49cc-a729-ebf2f94b2bb3\") " Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.017658 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5e968fa-782e-49cc-a729-ebf2f94b2bb3-ovn-combined-ca-bundle\") pod \"c5e968fa-782e-49cc-a729-ebf2f94b2bb3\" (UID: \"c5e968fa-782e-49cc-a729-ebf2f94b2bb3\") " Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.017900 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2hw5x\" (UniqueName: \"kubernetes.io/projected/c5e968fa-782e-49cc-a729-ebf2f94b2bb3-kube-api-access-2hw5x\") pod \"c5e968fa-782e-49cc-a729-ebf2f94b2bb3\" (UID: \"c5e968fa-782e-49cc-a729-ebf2f94b2bb3\") " Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.025989 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5e968fa-782e-49cc-a729-ebf2f94b2bb3-kube-api-access-2hw5x" (OuterVolumeSpecName: "kube-api-access-2hw5x") pod "c5e968fa-782e-49cc-a729-ebf2f94b2bb3" (UID: "c5e968fa-782e-49cc-a729-ebf2f94b2bb3"). InnerVolumeSpecName "kube-api-access-2hw5x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.027825 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5e968fa-782e-49cc-a729-ebf2f94b2bb3-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "c5e968fa-782e-49cc-a729-ebf2f94b2bb3" (UID: "c5e968fa-782e-49cc-a729-ebf2f94b2bb3"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.051060 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c5e968fa-782e-49cc-a729-ebf2f94b2bb3-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "c5e968fa-782e-49cc-a729-ebf2f94b2bb3" (UID: "c5e968fa-782e-49cc-a729-ebf2f94b2bb3"). InnerVolumeSpecName "ovncontroller-config-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.054238 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5e968fa-782e-49cc-a729-ebf2f94b2bb3-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c5e968fa-782e-49cc-a729-ebf2f94b2bb3" (UID: "c5e968fa-782e-49cc-a729-ebf2f94b2bb3"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.065483 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5e968fa-782e-49cc-a729-ebf2f94b2bb3-inventory" (OuterVolumeSpecName: "inventory") pod "c5e968fa-782e-49cc-a729-ebf2f94b2bb3" (UID: "c5e968fa-782e-49cc-a729-ebf2f94b2bb3"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.120975 4792 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c5e968fa-782e-49cc-a729-ebf2f94b2bb3-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.121022 4792 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c5e968fa-782e-49cc-a729-ebf2f94b2bb3-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.121040 4792 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/c5e968fa-782e-49cc-a729-ebf2f94b2bb3-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.121056 4792 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5e968fa-782e-49cc-a729-ebf2f94b2bb3-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.121071 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2hw5x\" (UniqueName: \"kubernetes.io/projected/c5e968fa-782e-49cc-a729-ebf2f94b2bb3-kube-api-access-2hw5x\") on node \"crc\" DevicePath \"\"" Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.309154 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fdm4x" event={"ID":"c5e968fa-782e-49cc-a729-ebf2f94b2bb3","Type":"ContainerDied","Data":"cd876cd00fd6dbdbc5efda46bcf8d7ca304a16c6534a65073944b42d84a28f57"} Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.309213 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cd876cd00fd6dbdbc5efda46bcf8d7ca304a16c6534a65073944b42d84a28f57" Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.309239 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fdm4x" Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.426178 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p"] Dec 02 19:15:13 crc kubenswrapper[4792]: E1202 19:15:13.426702 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5e968fa-782e-49cc-a729-ebf2f94b2bb3" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.426723 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5e968fa-782e-49cc-a729-ebf2f94b2bb3" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 02 19:15:13 crc kubenswrapper[4792]: E1202 19:15:13.426739 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98e61c09-7836-4f56-9dcd-229acd29ef98" containerName="collect-profiles" Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.426748 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="98e61c09-7836-4f56-9dcd-229acd29ef98" containerName="collect-profiles" Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.426994 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5e968fa-782e-49cc-a729-ebf2f94b2bb3" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.427014 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="98e61c09-7836-4f56-9dcd-229acd29ef98" containerName="collect-profiles" Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.427833 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p" Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.435071 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zln7c" Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.436049 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.436702 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.436891 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.436763 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.437287 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.437555 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p"] Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.529850 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bf6f87b0-64cb-4aa6-87b5-f4496dd79953-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p\" (UID: \"bf6f87b0-64cb-4aa6-87b5-f4496dd79953\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p" Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 
19:15:13.530151 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/bf6f87b0-64cb-4aa6-87b5-f4496dd79953-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p\" (UID: \"bf6f87b0-64cb-4aa6-87b5-f4496dd79953\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p" Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.530197 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bf6f87b0-64cb-4aa6-87b5-f4496dd79953-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p\" (UID: \"bf6f87b0-64cb-4aa6-87b5-f4496dd79953\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p" Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.530322 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf6f87b0-64cb-4aa6-87b5-f4496dd79953-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p\" (UID: \"bf6f87b0-64cb-4aa6-87b5-f4496dd79953\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p" Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.530363 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/bf6f87b0-64cb-4aa6-87b5-f4496dd79953-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p\" (UID: \"bf6f87b0-64cb-4aa6-87b5-f4496dd79953\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p" Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.530395 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44hb6\" (UniqueName: \"kubernetes.io/projected/bf6f87b0-64cb-4aa6-87b5-f4496dd79953-kube-api-access-44hb6\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p\" (UID: \"bf6f87b0-64cb-4aa6-87b5-f4496dd79953\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p" Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.632157 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf6f87b0-64cb-4aa6-87b5-f4496dd79953-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p\" (UID: \"bf6f87b0-64cb-4aa6-87b5-f4496dd79953\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p" Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.632487 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/bf6f87b0-64cb-4aa6-87b5-f4496dd79953-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p\" (UID: \"bf6f87b0-64cb-4aa6-87b5-f4496dd79953\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p" Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.632739 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44hb6\" 
(UniqueName: \"kubernetes.io/projected/bf6f87b0-64cb-4aa6-87b5-f4496dd79953-kube-api-access-44hb6\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p\" (UID: \"bf6f87b0-64cb-4aa6-87b5-f4496dd79953\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p" Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.634400 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bf6f87b0-64cb-4aa6-87b5-f4496dd79953-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p\" (UID: \"bf6f87b0-64cb-4aa6-87b5-f4496dd79953\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p" Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.634591 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/bf6f87b0-64cb-4aa6-87b5-f4496dd79953-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p\" (UID: \"bf6f87b0-64cb-4aa6-87b5-f4496dd79953\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p" Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.634720 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bf6f87b0-64cb-4aa6-87b5-f4496dd79953-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p\" (UID: \"bf6f87b0-64cb-4aa6-87b5-f4496dd79953\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p" Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.639154 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bf6f87b0-64cb-4aa6-87b5-f4496dd79953-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p\" (UID: \"bf6f87b0-64cb-4aa6-87b5-f4496dd79953\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p" Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.639225 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/bf6f87b0-64cb-4aa6-87b5-f4496dd79953-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p\" (UID: \"bf6f87b0-64cb-4aa6-87b5-f4496dd79953\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p" Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.645548 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf6f87b0-64cb-4aa6-87b5-f4496dd79953-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p\" (UID: \"bf6f87b0-64cb-4aa6-87b5-f4496dd79953\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p" Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.645988 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bf6f87b0-64cb-4aa6-87b5-f4496dd79953-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p\" (UID: \"bf6f87b0-64cb-4aa6-87b5-f4496dd79953\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p" Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.649454 4792 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/bf6f87b0-64cb-4aa6-87b5-f4496dd79953-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p\" (UID: \"bf6f87b0-64cb-4aa6-87b5-f4496dd79953\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p" Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.653037 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44hb6\" (UniqueName: \"kubernetes.io/projected/bf6f87b0-64cb-4aa6-87b5-f4496dd79953-kube-api-access-44hb6\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p\" (UID: \"bf6f87b0-64cb-4aa6-87b5-f4496dd79953\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p" Dec 02 19:15:13 crc kubenswrapper[4792]: I1202 19:15:13.765422 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p" Dec 02 19:15:14 crc kubenswrapper[4792]: I1202 19:15:14.466496 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p"] Dec 02 19:15:14 crc kubenswrapper[4792]: I1202 19:15:14.541451 4792 scope.go:117] "RemoveContainer" containerID="3c67a741cfc6dd9fe0d54fd77123388dbf880be9adb881936a491df61309de1e" Dec 02 19:15:14 crc kubenswrapper[4792]: E1202 19:15:14.541679 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:15:15 crc kubenswrapper[4792]: I1202 19:15:15.332175 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p" event={"ID":"bf6f87b0-64cb-4aa6-87b5-f4496dd79953","Type":"ContainerStarted","Data":"b7b9746aa98f8b3facb764b58433c0530b9ce322ba9992182f065e8c91bcc796"} Dec 02 19:15:15 crc kubenswrapper[4792]: I1202 19:15:15.332968 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p" event={"ID":"bf6f87b0-64cb-4aa6-87b5-f4496dd79953","Type":"ContainerStarted","Data":"b407ba79010b9cd281808c4b1188563df735d0b0b98c9634b7f8567ee896f6cc"} Dec 02 19:15:15 crc kubenswrapper[4792]: I1202 19:15:15.363630 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p" podStartSLOduration=1.8121002160000002 podStartE2EDuration="2.363604792s" podCreationTimestamp="2025-12-02 19:15:13 +0000 UTC" firstStartedPulling="2025-12-02 19:15:14.469843094 +0000 UTC m=+2345.242735422" lastFinishedPulling="2025-12-02 19:15:15.02134763 +0000 UTC m=+2345.794239998" observedRunningTime="2025-12-02 19:15:15.352931084 +0000 UTC m=+2346.125823432" watchObservedRunningTime="2025-12-02 19:15:15.363604792 +0000 UTC m=+2346.136497130" Dec 02 19:15:29 crc kubenswrapper[4792]: I1202 19:15:29.539912 4792 scope.go:117] "RemoveContainer" containerID="3c67a741cfc6dd9fe0d54fd77123388dbf880be9adb881936a491df61309de1e" Dec 02 19:15:29 crc kubenswrapper[4792]: E1202 
19:15:29.540702 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:15:37 crc kubenswrapper[4792]: I1202 19:15:37.764621 4792 scope.go:117] "RemoveContainer" containerID="de3a76cc713bc3cff53df24c8963afadeab33b153745e493a8423518f4bdf21e" Dec 02 19:15:43 crc kubenswrapper[4792]: I1202 19:15:43.540871 4792 scope.go:117] "RemoveContainer" containerID="3c67a741cfc6dd9fe0d54fd77123388dbf880be9adb881936a491df61309de1e" Dec 02 19:15:43 crc kubenswrapper[4792]: E1202 19:15:43.541945 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:15:58 crc kubenswrapper[4792]: I1202 19:15:58.540150 4792 scope.go:117] "RemoveContainer" containerID="3c67a741cfc6dd9fe0d54fd77123388dbf880be9adb881936a491df61309de1e" Dec 02 19:15:58 crc kubenswrapper[4792]: E1202 19:15:58.541258 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:16:11 crc kubenswrapper[4792]: I1202 19:16:11.546055 4792 scope.go:117] "RemoveContainer" containerID="3c67a741cfc6dd9fe0d54fd77123388dbf880be9adb881936a491df61309de1e" Dec 02 19:16:11 crc kubenswrapper[4792]: E1202 19:16:11.546724 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:16:16 crc kubenswrapper[4792]: I1202 19:16:16.114007 4792 generic.go:334] "Generic (PLEG): container finished" podID="bf6f87b0-64cb-4aa6-87b5-f4496dd79953" containerID="b7b9746aa98f8b3facb764b58433c0530b9ce322ba9992182f065e8c91bcc796" exitCode=0 Dec 02 19:16:16 crc kubenswrapper[4792]: I1202 19:16:16.114140 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p" event={"ID":"bf6f87b0-64cb-4aa6-87b5-f4496dd79953","Type":"ContainerDied","Data":"b7b9746aa98f8b3facb764b58433c0530b9ce322ba9992182f065e8c91bcc796"} Dec 02 19:16:17 crc kubenswrapper[4792]: I1202 19:16:17.716939 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p" Dec 02 19:16:17 crc kubenswrapper[4792]: I1202 19:16:17.771481 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf6f87b0-64cb-4aa6-87b5-f4496dd79953-neutron-metadata-combined-ca-bundle\") pod \"bf6f87b0-64cb-4aa6-87b5-f4496dd79953\" (UID: \"bf6f87b0-64cb-4aa6-87b5-f4496dd79953\") " Dec 02 19:16:17 crc kubenswrapper[4792]: I1202 19:16:17.771796 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bf6f87b0-64cb-4aa6-87b5-f4496dd79953-inventory\") pod \"bf6f87b0-64cb-4aa6-87b5-f4496dd79953\" (UID: \"bf6f87b0-64cb-4aa6-87b5-f4496dd79953\") " Dec 02 19:16:17 crc kubenswrapper[4792]: I1202 19:16:17.771935 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/bf6f87b0-64cb-4aa6-87b5-f4496dd79953-neutron-ovn-metadata-agent-neutron-config-0\") pod \"bf6f87b0-64cb-4aa6-87b5-f4496dd79953\" (UID: \"bf6f87b0-64cb-4aa6-87b5-f4496dd79953\") " Dec 02 19:16:17 crc kubenswrapper[4792]: I1202 19:16:17.772677 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/bf6f87b0-64cb-4aa6-87b5-f4496dd79953-nova-metadata-neutron-config-0\") pod \"bf6f87b0-64cb-4aa6-87b5-f4496dd79953\" (UID: \"bf6f87b0-64cb-4aa6-87b5-f4496dd79953\") " Dec 02 19:16:17 crc kubenswrapper[4792]: I1202 19:16:17.772918 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bf6f87b0-64cb-4aa6-87b5-f4496dd79953-ssh-key\") pod \"bf6f87b0-64cb-4aa6-87b5-f4496dd79953\" (UID: \"bf6f87b0-64cb-4aa6-87b5-f4496dd79953\") " Dec 02 19:16:17 crc kubenswrapper[4792]: I1202 19:16:17.772983 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-44hb6\" (UniqueName: \"kubernetes.io/projected/bf6f87b0-64cb-4aa6-87b5-f4496dd79953-kube-api-access-44hb6\") pod \"bf6f87b0-64cb-4aa6-87b5-f4496dd79953\" (UID: \"bf6f87b0-64cb-4aa6-87b5-f4496dd79953\") " Dec 02 19:16:17 crc kubenswrapper[4792]: I1202 19:16:17.781031 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf6f87b0-64cb-4aa6-87b5-f4496dd79953-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "bf6f87b0-64cb-4aa6-87b5-f4496dd79953" (UID: "bf6f87b0-64cb-4aa6-87b5-f4496dd79953"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:16:17 crc kubenswrapper[4792]: I1202 19:16:17.781151 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf6f87b0-64cb-4aa6-87b5-f4496dd79953-kube-api-access-44hb6" (OuterVolumeSpecName: "kube-api-access-44hb6") pod "bf6f87b0-64cb-4aa6-87b5-f4496dd79953" (UID: "bf6f87b0-64cb-4aa6-87b5-f4496dd79953"). InnerVolumeSpecName "kube-api-access-44hb6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:16:17 crc kubenswrapper[4792]: I1202 19:16:17.807909 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf6f87b0-64cb-4aa6-87b5-f4496dd79953-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "bf6f87b0-64cb-4aa6-87b5-f4496dd79953" (UID: "bf6f87b0-64cb-4aa6-87b5-f4496dd79953"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:16:17 crc kubenswrapper[4792]: I1202 19:16:17.817247 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf6f87b0-64cb-4aa6-87b5-f4496dd79953-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "bf6f87b0-64cb-4aa6-87b5-f4496dd79953" (UID: "bf6f87b0-64cb-4aa6-87b5-f4496dd79953"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:16:17 crc kubenswrapper[4792]: I1202 19:16:17.821016 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf6f87b0-64cb-4aa6-87b5-f4496dd79953-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "bf6f87b0-64cb-4aa6-87b5-f4496dd79953" (UID: "bf6f87b0-64cb-4aa6-87b5-f4496dd79953"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:16:17 crc kubenswrapper[4792]: I1202 19:16:17.834390 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf6f87b0-64cb-4aa6-87b5-f4496dd79953-inventory" (OuterVolumeSpecName: "inventory") pod "bf6f87b0-64cb-4aa6-87b5-f4496dd79953" (UID: "bf6f87b0-64cb-4aa6-87b5-f4496dd79953"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:16:17 crc kubenswrapper[4792]: I1202 19:16:17.875707 4792 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bf6f87b0-64cb-4aa6-87b5-f4496dd79953-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 19:16:17 crc kubenswrapper[4792]: I1202 19:16:17.875822 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-44hb6\" (UniqueName: \"kubernetes.io/projected/bf6f87b0-64cb-4aa6-87b5-f4496dd79953-kube-api-access-44hb6\") on node \"crc\" DevicePath \"\"" Dec 02 19:16:17 crc kubenswrapper[4792]: I1202 19:16:17.875894 4792 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf6f87b0-64cb-4aa6-87b5-f4496dd79953-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 19:16:17 crc kubenswrapper[4792]: I1202 19:16:17.875959 4792 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bf6f87b0-64cb-4aa6-87b5-f4496dd79953-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 19:16:17 crc kubenswrapper[4792]: I1202 19:16:17.876030 4792 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/bf6f87b0-64cb-4aa6-87b5-f4496dd79953-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 02 19:16:17 crc kubenswrapper[4792]: I1202 19:16:17.876112 4792 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/bf6f87b0-64cb-4aa6-87b5-f4496dd79953-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 02 19:16:18 crc kubenswrapper[4792]: I1202 19:16:18.146719 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p" event={"ID":"bf6f87b0-64cb-4aa6-87b5-f4496dd79953","Type":"ContainerDied","Data":"b407ba79010b9cd281808c4b1188563df735d0b0b98c9634b7f8567ee896f6cc"} Dec 02 19:16:18 crc kubenswrapper[4792]: I1202 19:16:18.146777 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b407ba79010b9cd281808c4b1188563df735d0b0b98c9634b7f8567ee896f6cc" Dec 02 19:16:18 crc kubenswrapper[4792]: I1202 19:16:18.146816 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p" Dec 02 19:16:18 crc kubenswrapper[4792]: I1202 19:16:18.300353 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-chd8r"] Dec 02 19:16:18 crc kubenswrapper[4792]: E1202 19:16:18.301228 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf6f87b0-64cb-4aa6-87b5-f4496dd79953" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 02 19:16:18 crc kubenswrapper[4792]: I1202 19:16:18.301277 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf6f87b0-64cb-4aa6-87b5-f4496dd79953" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 02 19:16:18 crc kubenswrapper[4792]: I1202 19:16:18.301971 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf6f87b0-64cb-4aa6-87b5-f4496dd79953" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 02 19:16:18 crc kubenswrapper[4792]: I1202 19:16:18.303773 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-chd8r" Dec 02 19:16:18 crc kubenswrapper[4792]: I1202 19:16:18.314050 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 19:16:18 crc kubenswrapper[4792]: I1202 19:16:18.314071 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zln7c" Dec 02 19:16:18 crc kubenswrapper[4792]: I1202 19:16:18.314294 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 19:16:18 crc kubenswrapper[4792]: I1202 19:16:18.314357 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Dec 02 19:16:18 crc kubenswrapper[4792]: I1202 19:16:18.314683 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 19:16:18 crc kubenswrapper[4792]: I1202 19:16:18.341848 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-chd8r"] Dec 02 19:16:18 crc kubenswrapper[4792]: I1202 19:16:18.392289 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/c0a31132-972a-4e92-b005-de8cacadfe2e-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-chd8r\" (UID: \"c0a31132-972a-4e92-b005-de8cacadfe2e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-chd8r" Dec 02 19:16:18 crc kubenswrapper[4792]: I1202 19:16:18.392393 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c0a31132-972a-4e92-b005-de8cacadfe2e-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-chd8r\" (UID: \"c0a31132-972a-4e92-b005-de8cacadfe2e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-chd8r" Dec 02 19:16:18 crc kubenswrapper[4792]: I1202 19:16:18.392449 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0a31132-972a-4e92-b005-de8cacadfe2e-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-chd8r\" (UID: \"c0a31132-972a-4e92-b005-de8cacadfe2e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-chd8r" Dec 02 19:16:18 crc kubenswrapper[4792]: I1202 19:16:18.392849 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c0a31132-972a-4e92-b005-de8cacadfe2e-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-chd8r\" (UID: \"c0a31132-972a-4e92-b005-de8cacadfe2e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-chd8r" Dec 02 19:16:18 crc kubenswrapper[4792]: I1202 19:16:18.393017 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w2cch\" (UniqueName: \"kubernetes.io/projected/c0a31132-972a-4e92-b005-de8cacadfe2e-kube-api-access-w2cch\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-chd8r\" (UID: \"c0a31132-972a-4e92-b005-de8cacadfe2e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-chd8r" Dec 02 19:16:18 crc kubenswrapper[4792]: I1202 19:16:18.495652 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-w2cch\" (UniqueName: \"kubernetes.io/projected/c0a31132-972a-4e92-b005-de8cacadfe2e-kube-api-access-w2cch\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-chd8r\" (UID: \"c0a31132-972a-4e92-b005-de8cacadfe2e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-chd8r" Dec 02 19:16:18 crc kubenswrapper[4792]: I1202 19:16:18.495741 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/c0a31132-972a-4e92-b005-de8cacadfe2e-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-chd8r\" (UID: \"c0a31132-972a-4e92-b005-de8cacadfe2e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-chd8r" Dec 02 19:16:18 crc kubenswrapper[4792]: I1202 19:16:18.495811 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c0a31132-972a-4e92-b005-de8cacadfe2e-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-chd8r\" (UID: \"c0a31132-972a-4e92-b005-de8cacadfe2e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-chd8r" Dec 02 19:16:18 crc kubenswrapper[4792]: I1202 19:16:18.495854 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0a31132-972a-4e92-b005-de8cacadfe2e-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-chd8r\" (UID: \"c0a31132-972a-4e92-b005-de8cacadfe2e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-chd8r" Dec 02 19:16:18 crc kubenswrapper[4792]: I1202 19:16:18.495968 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c0a31132-972a-4e92-b005-de8cacadfe2e-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-chd8r\" (UID: \"c0a31132-972a-4e92-b005-de8cacadfe2e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-chd8r" Dec 02 19:16:18 crc kubenswrapper[4792]: I1202 19:16:18.499286 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/c0a31132-972a-4e92-b005-de8cacadfe2e-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-chd8r\" (UID: \"c0a31132-972a-4e92-b005-de8cacadfe2e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-chd8r" Dec 02 19:16:18 crc kubenswrapper[4792]: I1202 19:16:18.499306 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c0a31132-972a-4e92-b005-de8cacadfe2e-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-chd8r\" (UID: \"c0a31132-972a-4e92-b005-de8cacadfe2e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-chd8r" Dec 02 19:16:18 crc kubenswrapper[4792]: I1202 19:16:18.500427 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0a31132-972a-4e92-b005-de8cacadfe2e-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-chd8r\" (UID: \"c0a31132-972a-4e92-b005-de8cacadfe2e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-chd8r" Dec 02 19:16:18 crc kubenswrapper[4792]: I1202 19:16:18.500680 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c0a31132-972a-4e92-b005-de8cacadfe2e-ssh-key\") pod 
\"libvirt-edpm-deployment-openstack-edpm-ipam-chd8r\" (UID: \"c0a31132-972a-4e92-b005-de8cacadfe2e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-chd8r" Dec 02 19:16:18 crc kubenswrapper[4792]: I1202 19:16:18.511076 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w2cch\" (UniqueName: \"kubernetes.io/projected/c0a31132-972a-4e92-b005-de8cacadfe2e-kube-api-access-w2cch\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-chd8r\" (UID: \"c0a31132-972a-4e92-b005-de8cacadfe2e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-chd8r" Dec 02 19:16:18 crc kubenswrapper[4792]: I1202 19:16:18.639588 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-chd8r" Dec 02 19:16:19 crc kubenswrapper[4792]: I1202 19:16:19.711748 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-chd8r"] Dec 02 19:16:19 crc kubenswrapper[4792]: W1202 19:16:19.713642 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc0a31132_972a_4e92_b005_de8cacadfe2e.slice/crio-f22af1972163034b33fe26eedd694ddfcf4c49f77172892c0bcf395a9de3de6f WatchSource:0}: Error finding container f22af1972163034b33fe26eedd694ddfcf4c49f77172892c0bcf395a9de3de6f: Status 404 returned error can't find the container with id f22af1972163034b33fe26eedd694ddfcf4c49f77172892c0bcf395a9de3de6f Dec 02 19:16:20 crc kubenswrapper[4792]: I1202 19:16:20.168678 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-chd8r" event={"ID":"c0a31132-972a-4e92-b005-de8cacadfe2e","Type":"ContainerStarted","Data":"f22af1972163034b33fe26eedd694ddfcf4c49f77172892c0bcf395a9de3de6f"} Dec 02 19:16:21 crc kubenswrapper[4792]: I1202 19:16:21.185774 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-chd8r" event={"ID":"c0a31132-972a-4e92-b005-de8cacadfe2e","Type":"ContainerStarted","Data":"3c0cd430374399fcbf1a660fcf354ddafd0fe79d17c2ac5dc7af8c0ac7e95665"} Dec 02 19:16:21 crc kubenswrapper[4792]: I1202 19:16:21.216268 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-chd8r" podStartSLOduration=2.6665104619999997 podStartE2EDuration="3.216244222s" podCreationTimestamp="2025-12-02 19:16:18 +0000 UTC" firstStartedPulling="2025-12-02 19:16:19.718051929 +0000 UTC m=+2410.490944297" lastFinishedPulling="2025-12-02 19:16:20.267785689 +0000 UTC m=+2411.040678057" observedRunningTime="2025-12-02 19:16:21.210311458 +0000 UTC m=+2411.983203826" watchObservedRunningTime="2025-12-02 19:16:21.216244222 +0000 UTC m=+2411.989136580" Dec 02 19:16:25 crc kubenswrapper[4792]: I1202 19:16:25.556853 4792 scope.go:117] "RemoveContainer" containerID="3c67a741cfc6dd9fe0d54fd77123388dbf880be9adb881936a491df61309de1e" Dec 02 19:16:25 crc kubenswrapper[4792]: E1202 19:16:25.558610 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:16:39 
crc kubenswrapper[4792]: I1202 19:16:39.556312 4792 scope.go:117] "RemoveContainer" containerID="3c67a741cfc6dd9fe0d54fd77123388dbf880be9adb881936a491df61309de1e" Dec 02 19:16:39 crc kubenswrapper[4792]: E1202 19:16:39.557948 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:16:51 crc kubenswrapper[4792]: I1202 19:16:51.540022 4792 scope.go:117] "RemoveContainer" containerID="3c67a741cfc6dd9fe0d54fd77123388dbf880be9adb881936a491df61309de1e" Dec 02 19:16:51 crc kubenswrapper[4792]: E1202 19:16:51.541002 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:17:02 crc kubenswrapper[4792]: I1202 19:17:02.743683 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-fg4jt"] Dec 02 19:17:02 crc kubenswrapper[4792]: I1202 19:17:02.748004 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fg4jt" Dec 02 19:17:02 crc kubenswrapper[4792]: I1202 19:17:02.763488 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fg4jt"] Dec 02 19:17:02 crc kubenswrapper[4792]: I1202 19:17:02.851272 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/758e6120-7b0a-457c-ad3e-e7ee8554d59c-utilities\") pod \"redhat-operators-fg4jt\" (UID: \"758e6120-7b0a-457c-ad3e-e7ee8554d59c\") " pod="openshift-marketplace/redhat-operators-fg4jt" Dec 02 19:17:02 crc kubenswrapper[4792]: I1202 19:17:02.851530 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-42rmv\" (UniqueName: \"kubernetes.io/projected/758e6120-7b0a-457c-ad3e-e7ee8554d59c-kube-api-access-42rmv\") pod \"redhat-operators-fg4jt\" (UID: \"758e6120-7b0a-457c-ad3e-e7ee8554d59c\") " pod="openshift-marketplace/redhat-operators-fg4jt" Dec 02 19:17:02 crc kubenswrapper[4792]: I1202 19:17:02.851621 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/758e6120-7b0a-457c-ad3e-e7ee8554d59c-catalog-content\") pod \"redhat-operators-fg4jt\" (UID: \"758e6120-7b0a-457c-ad3e-e7ee8554d59c\") " pod="openshift-marketplace/redhat-operators-fg4jt" Dec 02 19:17:02 crc kubenswrapper[4792]: I1202 19:17:02.954085 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/758e6120-7b0a-457c-ad3e-e7ee8554d59c-utilities\") pod \"redhat-operators-fg4jt\" (UID: \"758e6120-7b0a-457c-ad3e-e7ee8554d59c\") " pod="openshift-marketplace/redhat-operators-fg4jt" Dec 02 19:17:02 crc kubenswrapper[4792]: I1202 19:17:02.954279 4792 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-42rmv\" (UniqueName: \"kubernetes.io/projected/758e6120-7b0a-457c-ad3e-e7ee8554d59c-kube-api-access-42rmv\") pod \"redhat-operators-fg4jt\" (UID: \"758e6120-7b0a-457c-ad3e-e7ee8554d59c\") " pod="openshift-marketplace/redhat-operators-fg4jt" Dec 02 19:17:02 crc kubenswrapper[4792]: I1202 19:17:02.954337 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/758e6120-7b0a-457c-ad3e-e7ee8554d59c-catalog-content\") pod \"redhat-operators-fg4jt\" (UID: \"758e6120-7b0a-457c-ad3e-e7ee8554d59c\") " pod="openshift-marketplace/redhat-operators-fg4jt" Dec 02 19:17:02 crc kubenswrapper[4792]: I1202 19:17:02.955286 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/758e6120-7b0a-457c-ad3e-e7ee8554d59c-catalog-content\") pod \"redhat-operators-fg4jt\" (UID: \"758e6120-7b0a-457c-ad3e-e7ee8554d59c\") " pod="openshift-marketplace/redhat-operators-fg4jt" Dec 02 19:17:02 crc kubenswrapper[4792]: I1202 19:17:02.955797 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/758e6120-7b0a-457c-ad3e-e7ee8554d59c-utilities\") pod \"redhat-operators-fg4jt\" (UID: \"758e6120-7b0a-457c-ad3e-e7ee8554d59c\") " pod="openshift-marketplace/redhat-operators-fg4jt" Dec 02 19:17:02 crc kubenswrapper[4792]: I1202 19:17:02.984182 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-42rmv\" (UniqueName: \"kubernetes.io/projected/758e6120-7b0a-457c-ad3e-e7ee8554d59c-kube-api-access-42rmv\") pod \"redhat-operators-fg4jt\" (UID: \"758e6120-7b0a-457c-ad3e-e7ee8554d59c\") " pod="openshift-marketplace/redhat-operators-fg4jt" Dec 02 19:17:03 crc kubenswrapper[4792]: I1202 19:17:03.087967 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-fg4jt" Dec 02 19:17:03 crc kubenswrapper[4792]: I1202 19:17:03.540258 4792 scope.go:117] "RemoveContainer" containerID="3c67a741cfc6dd9fe0d54fd77123388dbf880be9adb881936a491df61309de1e" Dec 02 19:17:03 crc kubenswrapper[4792]: E1202 19:17:03.540987 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:17:03 crc kubenswrapper[4792]: I1202 19:17:03.624162 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fg4jt"] Dec 02 19:17:03 crc kubenswrapper[4792]: I1202 19:17:03.750933 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fg4jt" event={"ID":"758e6120-7b0a-457c-ad3e-e7ee8554d59c","Type":"ContainerStarted","Data":"a0b984e81e4163952f09475b76bad535e283f075fe88c8f4e63a37991207d935"} Dec 02 19:17:04 crc kubenswrapper[4792]: I1202 19:17:04.767300 4792 generic.go:334] "Generic (PLEG): container finished" podID="758e6120-7b0a-457c-ad3e-e7ee8554d59c" containerID="06803661337a7d47d2d6817c90796af0b68acfc05f2673fe015e14e7e18965d6" exitCode=0 Dec 02 19:17:04 crc kubenswrapper[4792]: I1202 19:17:04.767388 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fg4jt" event={"ID":"758e6120-7b0a-457c-ad3e-e7ee8554d59c","Type":"ContainerDied","Data":"06803661337a7d47d2d6817c90796af0b68acfc05f2673fe015e14e7e18965d6"} Dec 02 19:17:05 crc kubenswrapper[4792]: I1202 19:17:05.787332 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fg4jt" event={"ID":"758e6120-7b0a-457c-ad3e-e7ee8554d59c","Type":"ContainerStarted","Data":"ef865c8d2474994b8f04f953b81d6466fed8969a3c5c1b5e185b5afba68527b3"} Dec 02 19:17:07 crc kubenswrapper[4792]: I1202 19:17:07.813605 4792 generic.go:334] "Generic (PLEG): container finished" podID="758e6120-7b0a-457c-ad3e-e7ee8554d59c" containerID="ef865c8d2474994b8f04f953b81d6466fed8969a3c5c1b5e185b5afba68527b3" exitCode=0 Dec 02 19:17:07 crc kubenswrapper[4792]: I1202 19:17:07.813678 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fg4jt" event={"ID":"758e6120-7b0a-457c-ad3e-e7ee8554d59c","Type":"ContainerDied","Data":"ef865c8d2474994b8f04f953b81d6466fed8969a3c5c1b5e185b5afba68527b3"} Dec 02 19:17:09 crc kubenswrapper[4792]: I1202 19:17:09.844954 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fg4jt" event={"ID":"758e6120-7b0a-457c-ad3e-e7ee8554d59c","Type":"ContainerStarted","Data":"44872ba92c4a1da951752a7f674acf5bcade7d29a8263bf5744e7b96451daf45"} Dec 02 19:17:09 crc kubenswrapper[4792]: I1202 19:17:09.872797 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-fg4jt" podStartSLOduration=4.058432767 podStartE2EDuration="7.872777167s" podCreationTimestamp="2025-12-02 19:17:02 +0000 UTC" firstStartedPulling="2025-12-02 19:17:04.769900508 +0000 UTC m=+2455.542792836" lastFinishedPulling="2025-12-02 19:17:08.584244898 +0000 UTC m=+2459.357137236" observedRunningTime="2025-12-02 19:17:09.868516125 +0000 
UTC m=+2460.641408493" watchObservedRunningTime="2025-12-02 19:17:09.872777167 +0000 UTC m=+2460.645669515" Dec 02 19:17:13 crc kubenswrapper[4792]: I1202 19:17:13.088872 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-fg4jt" Dec 02 19:17:13 crc kubenswrapper[4792]: I1202 19:17:13.094747 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-fg4jt" Dec 02 19:17:14 crc kubenswrapper[4792]: I1202 19:17:14.162596 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-fg4jt" podUID="758e6120-7b0a-457c-ad3e-e7ee8554d59c" containerName="registry-server" probeResult="failure" output=< Dec 02 19:17:14 crc kubenswrapper[4792]: timeout: failed to connect service ":50051" within 1s Dec 02 19:17:14 crc kubenswrapper[4792]: > Dec 02 19:17:15 crc kubenswrapper[4792]: I1202 19:17:15.541864 4792 scope.go:117] "RemoveContainer" containerID="3c67a741cfc6dd9fe0d54fd77123388dbf880be9adb881936a491df61309de1e" Dec 02 19:17:15 crc kubenswrapper[4792]: E1202 19:17:15.542458 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:17:23 crc kubenswrapper[4792]: I1202 19:17:23.154560 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-fg4jt" Dec 02 19:17:23 crc kubenswrapper[4792]: I1202 19:17:23.217027 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-fg4jt" Dec 02 19:17:23 crc kubenswrapper[4792]: I1202 19:17:23.411410 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fg4jt"] Dec 02 19:17:25 crc kubenswrapper[4792]: I1202 19:17:25.009218 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-fg4jt" podUID="758e6120-7b0a-457c-ad3e-e7ee8554d59c" containerName="registry-server" containerID="cri-o://44872ba92c4a1da951752a7f674acf5bcade7d29a8263bf5744e7b96451daf45" gracePeriod=2 Dec 02 19:17:25 crc kubenswrapper[4792]: I1202 19:17:25.555606 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-fg4jt" Dec 02 19:17:25 crc kubenswrapper[4792]: I1202 19:17:25.696602 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-42rmv\" (UniqueName: \"kubernetes.io/projected/758e6120-7b0a-457c-ad3e-e7ee8554d59c-kube-api-access-42rmv\") pod \"758e6120-7b0a-457c-ad3e-e7ee8554d59c\" (UID: \"758e6120-7b0a-457c-ad3e-e7ee8554d59c\") " Dec 02 19:17:25 crc kubenswrapper[4792]: I1202 19:17:25.696981 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/758e6120-7b0a-457c-ad3e-e7ee8554d59c-utilities\") pod \"758e6120-7b0a-457c-ad3e-e7ee8554d59c\" (UID: \"758e6120-7b0a-457c-ad3e-e7ee8554d59c\") " Dec 02 19:17:25 crc kubenswrapper[4792]: I1202 19:17:25.697183 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/758e6120-7b0a-457c-ad3e-e7ee8554d59c-catalog-content\") pod \"758e6120-7b0a-457c-ad3e-e7ee8554d59c\" (UID: \"758e6120-7b0a-457c-ad3e-e7ee8554d59c\") " Dec 02 19:17:25 crc kubenswrapper[4792]: I1202 19:17:25.698682 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/758e6120-7b0a-457c-ad3e-e7ee8554d59c-utilities" (OuterVolumeSpecName: "utilities") pod "758e6120-7b0a-457c-ad3e-e7ee8554d59c" (UID: "758e6120-7b0a-457c-ad3e-e7ee8554d59c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:17:25 crc kubenswrapper[4792]: I1202 19:17:25.717335 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/758e6120-7b0a-457c-ad3e-e7ee8554d59c-kube-api-access-42rmv" (OuterVolumeSpecName: "kube-api-access-42rmv") pod "758e6120-7b0a-457c-ad3e-e7ee8554d59c" (UID: "758e6120-7b0a-457c-ad3e-e7ee8554d59c"). InnerVolumeSpecName "kube-api-access-42rmv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:17:25 crc kubenswrapper[4792]: I1202 19:17:25.799035 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-42rmv\" (UniqueName: \"kubernetes.io/projected/758e6120-7b0a-457c-ad3e-e7ee8554d59c-kube-api-access-42rmv\") on node \"crc\" DevicePath \"\"" Dec 02 19:17:25 crc kubenswrapper[4792]: I1202 19:17:25.799064 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/758e6120-7b0a-457c-ad3e-e7ee8554d59c-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 19:17:25 crc kubenswrapper[4792]: I1202 19:17:25.812389 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/758e6120-7b0a-457c-ad3e-e7ee8554d59c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "758e6120-7b0a-457c-ad3e-e7ee8554d59c" (UID: "758e6120-7b0a-457c-ad3e-e7ee8554d59c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:17:25 crc kubenswrapper[4792]: I1202 19:17:25.902128 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/758e6120-7b0a-457c-ad3e-e7ee8554d59c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 19:17:26 crc kubenswrapper[4792]: I1202 19:17:26.032023 4792 generic.go:334] "Generic (PLEG): container finished" podID="758e6120-7b0a-457c-ad3e-e7ee8554d59c" containerID="44872ba92c4a1da951752a7f674acf5bcade7d29a8263bf5744e7b96451daf45" exitCode=0 Dec 02 19:17:26 crc kubenswrapper[4792]: I1202 19:17:26.032086 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fg4jt" event={"ID":"758e6120-7b0a-457c-ad3e-e7ee8554d59c","Type":"ContainerDied","Data":"44872ba92c4a1da951752a7f674acf5bcade7d29a8263bf5744e7b96451daf45"} Dec 02 19:17:26 crc kubenswrapper[4792]: I1202 19:17:26.032126 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fg4jt" event={"ID":"758e6120-7b0a-457c-ad3e-e7ee8554d59c","Type":"ContainerDied","Data":"a0b984e81e4163952f09475b76bad535e283f075fe88c8f4e63a37991207d935"} Dec 02 19:17:26 crc kubenswrapper[4792]: I1202 19:17:26.032152 4792 scope.go:117] "RemoveContainer" containerID="44872ba92c4a1da951752a7f674acf5bcade7d29a8263bf5744e7b96451daf45" Dec 02 19:17:26 crc kubenswrapper[4792]: I1202 19:17:26.032380 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fg4jt" Dec 02 19:17:26 crc kubenswrapper[4792]: I1202 19:17:26.086022 4792 scope.go:117] "RemoveContainer" containerID="ef865c8d2474994b8f04f953b81d6466fed8969a3c5c1b5e185b5afba68527b3" Dec 02 19:17:26 crc kubenswrapper[4792]: I1202 19:17:26.122601 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fg4jt"] Dec 02 19:17:26 crc kubenswrapper[4792]: I1202 19:17:26.139871 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-fg4jt"] Dec 02 19:17:26 crc kubenswrapper[4792]: I1202 19:17:26.141701 4792 scope.go:117] "RemoveContainer" containerID="06803661337a7d47d2d6817c90796af0b68acfc05f2673fe015e14e7e18965d6" Dec 02 19:17:26 crc kubenswrapper[4792]: I1202 19:17:26.190788 4792 scope.go:117] "RemoveContainer" containerID="44872ba92c4a1da951752a7f674acf5bcade7d29a8263bf5744e7b96451daf45" Dec 02 19:17:26 crc kubenswrapper[4792]: E1202 19:17:26.191270 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"44872ba92c4a1da951752a7f674acf5bcade7d29a8263bf5744e7b96451daf45\": container with ID starting with 44872ba92c4a1da951752a7f674acf5bcade7d29a8263bf5744e7b96451daf45 not found: ID does not exist" containerID="44872ba92c4a1da951752a7f674acf5bcade7d29a8263bf5744e7b96451daf45" Dec 02 19:17:26 crc kubenswrapper[4792]: I1202 19:17:26.191303 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"44872ba92c4a1da951752a7f674acf5bcade7d29a8263bf5744e7b96451daf45"} err="failed to get container status \"44872ba92c4a1da951752a7f674acf5bcade7d29a8263bf5744e7b96451daf45\": rpc error: code = NotFound desc = could not find container \"44872ba92c4a1da951752a7f674acf5bcade7d29a8263bf5744e7b96451daf45\": container with ID starting with 44872ba92c4a1da951752a7f674acf5bcade7d29a8263bf5744e7b96451daf45 not found: ID does not exist" Dec 02 19:17:26 crc 
kubenswrapper[4792]: I1202 19:17:26.191324 4792 scope.go:117] "RemoveContainer" containerID="ef865c8d2474994b8f04f953b81d6466fed8969a3c5c1b5e185b5afba68527b3" Dec 02 19:17:26 crc kubenswrapper[4792]: E1202 19:17:26.191513 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ef865c8d2474994b8f04f953b81d6466fed8969a3c5c1b5e185b5afba68527b3\": container with ID starting with ef865c8d2474994b8f04f953b81d6466fed8969a3c5c1b5e185b5afba68527b3 not found: ID does not exist" containerID="ef865c8d2474994b8f04f953b81d6466fed8969a3c5c1b5e185b5afba68527b3" Dec 02 19:17:26 crc kubenswrapper[4792]: I1202 19:17:26.191534 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef865c8d2474994b8f04f953b81d6466fed8969a3c5c1b5e185b5afba68527b3"} err="failed to get container status \"ef865c8d2474994b8f04f953b81d6466fed8969a3c5c1b5e185b5afba68527b3\": rpc error: code = NotFound desc = could not find container \"ef865c8d2474994b8f04f953b81d6466fed8969a3c5c1b5e185b5afba68527b3\": container with ID starting with ef865c8d2474994b8f04f953b81d6466fed8969a3c5c1b5e185b5afba68527b3 not found: ID does not exist" Dec 02 19:17:26 crc kubenswrapper[4792]: I1202 19:17:26.191559 4792 scope.go:117] "RemoveContainer" containerID="06803661337a7d47d2d6817c90796af0b68acfc05f2673fe015e14e7e18965d6" Dec 02 19:17:26 crc kubenswrapper[4792]: E1202 19:17:26.191849 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"06803661337a7d47d2d6817c90796af0b68acfc05f2673fe015e14e7e18965d6\": container with ID starting with 06803661337a7d47d2d6817c90796af0b68acfc05f2673fe015e14e7e18965d6 not found: ID does not exist" containerID="06803661337a7d47d2d6817c90796af0b68acfc05f2673fe015e14e7e18965d6" Dec 02 19:17:26 crc kubenswrapper[4792]: I1202 19:17:26.191868 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"06803661337a7d47d2d6817c90796af0b68acfc05f2673fe015e14e7e18965d6"} err="failed to get container status \"06803661337a7d47d2d6817c90796af0b68acfc05f2673fe015e14e7e18965d6\": rpc error: code = NotFound desc = could not find container \"06803661337a7d47d2d6817c90796af0b68acfc05f2673fe015e14e7e18965d6\": container with ID starting with 06803661337a7d47d2d6817c90796af0b68acfc05f2673fe015e14e7e18965d6 not found: ID does not exist" Dec 02 19:17:27 crc kubenswrapper[4792]: I1202 19:17:27.555854 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="758e6120-7b0a-457c-ad3e-e7ee8554d59c" path="/var/lib/kubelet/pods/758e6120-7b0a-457c-ad3e-e7ee8554d59c/volumes" Dec 02 19:17:28 crc kubenswrapper[4792]: I1202 19:17:28.540225 4792 scope.go:117] "RemoveContainer" containerID="3c67a741cfc6dd9fe0d54fd77123388dbf880be9adb881936a491df61309de1e" Dec 02 19:17:28 crc kubenswrapper[4792]: E1202 19:17:28.540809 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:17:43 crc kubenswrapper[4792]: I1202 19:17:43.540339 4792 scope.go:117] "RemoveContainer" containerID="3c67a741cfc6dd9fe0d54fd77123388dbf880be9adb881936a491df61309de1e" 
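---- annotation (not part of the journal) ----
The repeating scope.go "RemoveContainer" / pod_workers.go "Error syncing pod, skipping" pairs throughout this journal are the kubelet's back-off loop for the machine-config-daemon container: on each sync it notes the dead container ID (3c67a741…) and declines to restart it until the 5m0s CrashLoopBackOff window expires, so the same pair recurs at roughly the back-off interval. Below is a minimal client-go sketch for reading the same state from the API server; it is an illustration only, assuming the namespace and pod name taken from these entries and a kubeconfig at the default location, not a tool referenced by this log.

    // crashloop_inspect.go - minimal sketch (assumptions: pod/namespace copied from the
    // log above; kubeconfig at the default ~/.kube/config path). Prints each container's
    // restart count, the exit code/reason of its last termination, and the current
    // waiting reason, which during the window logged above reads CrashLoopBackOff.
    package main

    import (
        "context"
        "fmt"
        "log"

        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/clientcmd"
    )

    func main() {
        // Load the default kubeconfig; inside a cluster, rest.InClusterConfig() would be used instead.
        cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
        if err != nil {
            log.Fatal(err)
        }
        cs, err := kubernetes.NewForConfig(cfg)
        if err != nil {
            log.Fatal(err)
        }
        // Pod and namespace taken from the journal entries above.
        pod, err := cs.CoreV1().Pods("openshift-machine-config-operator").
            Get(context.TODO(), "machine-config-daemon-wpdh4", metav1.GetOptions{})
        if err != nil {
            log.Fatal(err)
        }
        for _, st := range pod.Status.ContainerStatuses {
            fmt.Printf("container=%s restarts=%d\n", st.Name, st.RestartCount)
            if t := st.LastTerminationState.Terminated; t != nil {
                // The exit code/reason of the previous run is what the kubelet's
                // back-off decision is based on.
                fmt.Printf("  last exit: code=%d reason=%s\n", t.ExitCode, t.Reason)
            }
            if w := st.State.Waiting; w != nil {
                fmt.Printf("  waiting: reason=%s\n", w.Reason)
            }
        }
    }

The same information is available interactively via `oc describe pod -n openshift-machine-config-operator machine-config-daemon-wpdh4`, and `oc logs --previous` retrieves output from the failed container instance.
---- end annotation ----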
Dec 02 19:17:43 crc kubenswrapper[4792]: E1202 19:17:43.541282 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f"
Dec 02 19:17:55 crc kubenswrapper[4792]: I1202 19:17:55.540737 4792 scope.go:117] "RemoveContainer" containerID="3c67a741cfc6dd9fe0d54fd77123388dbf880be9adb881936a491df61309de1e"
Dec 02 19:17:55 crc kubenswrapper[4792]: E1202 19:17:55.541741 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f"
Dec 02 19:18:08 crc kubenswrapper[4792]: I1202 19:18:08.540020 4792 scope.go:117] "RemoveContainer" containerID="3c67a741cfc6dd9fe0d54fd77123388dbf880be9adb881936a491df61309de1e"
Dec 02 19:18:08 crc kubenswrapper[4792]: E1202 19:18:08.540861 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f"
Dec 02 19:18:23 crc kubenswrapper[4792]: I1202 19:18:23.547318 4792 scope.go:117] "RemoveContainer" containerID="3c67a741cfc6dd9fe0d54fd77123388dbf880be9adb881936a491df61309de1e"
Dec 02 19:18:23 crc kubenswrapper[4792]: E1202 19:18:23.549791 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f"
Dec 02 19:18:30 crc kubenswrapper[4792]: I1202 19:18:30.912099 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-zmzjc"]
Dec 02 19:18:30 crc kubenswrapper[4792]: E1202 19:18:30.914430 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="758e6120-7b0a-457c-ad3e-e7ee8554d59c" containerName="extract-utilities"
Dec 02 19:18:30 crc kubenswrapper[4792]: I1202 19:18:30.914567 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="758e6120-7b0a-457c-ad3e-e7ee8554d59c" containerName="extract-utilities"
Dec 02 19:18:30 crc kubenswrapper[4792]: E1202 19:18:30.914657 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="758e6120-7b0a-457c-ad3e-e7ee8554d59c" containerName="registry-server"
Dec 02 19:18:30 crc kubenswrapper[4792]: I1202 19:18:30.914735 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="758e6120-7b0a-457c-ad3e-e7ee8554d59c" containerName="registry-server"
Dec 02 19:18:30 crc kubenswrapper[4792]: E1202 19:18:30.914819 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="758e6120-7b0a-457c-ad3e-e7ee8554d59c" containerName="extract-content"
Dec 02 19:18:30 crc kubenswrapper[4792]: I1202 19:18:30.914899 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="758e6120-7b0a-457c-ad3e-e7ee8554d59c" containerName="extract-content"
Dec 02 19:18:30 crc kubenswrapper[4792]: I1202 19:18:30.915268 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="758e6120-7b0a-457c-ad3e-e7ee8554d59c" containerName="registry-server"
Dec 02 19:18:30 crc kubenswrapper[4792]: I1202 19:18:30.917599 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zmzjc"
Dec 02 19:18:30 crc kubenswrapper[4792]: I1202 19:18:30.929161 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zmzjc"]
Dec 02 19:18:31 crc kubenswrapper[4792]: I1202 19:18:31.074335 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w897j\" (UniqueName: \"kubernetes.io/projected/c5ef3f19-9535-4a40-b56d-8fa0e94a2158-kube-api-access-w897j\") pod \"community-operators-zmzjc\" (UID: \"c5ef3f19-9535-4a40-b56d-8fa0e94a2158\") " pod="openshift-marketplace/community-operators-zmzjc"
Dec 02 19:18:31 crc kubenswrapper[4792]: I1202 19:18:31.074494 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5ef3f19-9535-4a40-b56d-8fa0e94a2158-utilities\") pod \"community-operators-zmzjc\" (UID: \"c5ef3f19-9535-4a40-b56d-8fa0e94a2158\") " pod="openshift-marketplace/community-operators-zmzjc"
Dec 02 19:18:31 crc kubenswrapper[4792]: I1202 19:18:31.074562 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5ef3f19-9535-4a40-b56d-8fa0e94a2158-catalog-content\") pod \"community-operators-zmzjc\" (UID: \"c5ef3f19-9535-4a40-b56d-8fa0e94a2158\") " pod="openshift-marketplace/community-operators-zmzjc"
Dec 02 19:18:31 crc kubenswrapper[4792]: I1202 19:18:31.176939 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w897j\" (UniqueName: \"kubernetes.io/projected/c5ef3f19-9535-4a40-b56d-8fa0e94a2158-kube-api-access-w897j\") pod \"community-operators-zmzjc\" (UID: \"c5ef3f19-9535-4a40-b56d-8fa0e94a2158\") " pod="openshift-marketplace/community-operators-zmzjc"
Dec 02 19:18:31 crc kubenswrapper[4792]: I1202 19:18:31.177070 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5ef3f19-9535-4a40-b56d-8fa0e94a2158-utilities\") pod \"community-operators-zmzjc\" (UID: \"c5ef3f19-9535-4a40-b56d-8fa0e94a2158\") " pod="openshift-marketplace/community-operators-zmzjc"
Dec 02 19:18:31 crc kubenswrapper[4792]: I1202 19:18:31.177106 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5ef3f19-9535-4a40-b56d-8fa0e94a2158-catalog-content\") pod \"community-operators-zmzjc\" (UID: \"c5ef3f19-9535-4a40-b56d-8fa0e94a2158\") " pod="openshift-marketplace/community-operators-zmzjc"
Dec 02 19:18:31 crc kubenswrapper[4792]: I1202 19:18:31.177566 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5ef3f19-9535-4a40-b56d-8fa0e94a2158-catalog-content\") pod \"community-operators-zmzjc\" (UID: \"c5ef3f19-9535-4a40-b56d-8fa0e94a2158\") " pod="openshift-marketplace/community-operators-zmzjc"
Dec 02 19:18:31 crc kubenswrapper[4792]: I1202 19:18:31.178190 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5ef3f19-9535-4a40-b56d-8fa0e94a2158-utilities\") pod \"community-operators-zmzjc\" (UID: \"c5ef3f19-9535-4a40-b56d-8fa0e94a2158\") " pod="openshift-marketplace/community-operators-zmzjc"
Dec 02 19:18:31 crc kubenswrapper[4792]: I1202 19:18:31.213778 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w897j\" (UniqueName: \"kubernetes.io/projected/c5ef3f19-9535-4a40-b56d-8fa0e94a2158-kube-api-access-w897j\") pod \"community-operators-zmzjc\" (UID: \"c5ef3f19-9535-4a40-b56d-8fa0e94a2158\") " pod="openshift-marketplace/community-operators-zmzjc"
Dec 02 19:18:31 crc kubenswrapper[4792]: I1202 19:18:31.254432 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zmzjc"
Dec 02 19:18:31 crc kubenswrapper[4792]: I1202 19:18:31.784292 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zmzjc"]
Dec 02 19:18:31 crc kubenswrapper[4792]: I1202 19:18:31.836276 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zmzjc" event={"ID":"c5ef3f19-9535-4a40-b56d-8fa0e94a2158","Type":"ContainerStarted","Data":"13a079550503e8c380fec5fa5c22c56e331a5d1bfac8e27cc2960c438d1e9e7e"}
Dec 02 19:18:32 crc kubenswrapper[4792]: I1202 19:18:32.854315 4792 generic.go:334] "Generic (PLEG): container finished" podID="c5ef3f19-9535-4a40-b56d-8fa0e94a2158" containerID="2a6d241260041e3795792bd776856a2cf1266a70a279f860f66e9feaac826558" exitCode=0
Dec 02 19:18:32 crc kubenswrapper[4792]: I1202 19:18:32.854435 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zmzjc" event={"ID":"c5ef3f19-9535-4a40-b56d-8fa0e94a2158","Type":"ContainerDied","Data":"2a6d241260041e3795792bd776856a2cf1266a70a279f860f66e9feaac826558"}
Dec 02 19:18:34 crc kubenswrapper[4792]: I1202 19:18:34.884094 4792 generic.go:334] "Generic (PLEG): container finished" podID="c5ef3f19-9535-4a40-b56d-8fa0e94a2158" containerID="874357106b37ea63bbfe96e221147238a67f4b9f4a2b937cddb3fe358ee97bb8" exitCode=0
Dec 02 19:18:34 crc kubenswrapper[4792]: I1202 19:18:34.884140 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zmzjc" event={"ID":"c5ef3f19-9535-4a40-b56d-8fa0e94a2158","Type":"ContainerDied","Data":"874357106b37ea63bbfe96e221147238a67f4b9f4a2b937cddb3fe358ee97bb8"}
Dec 02 19:18:35 crc kubenswrapper[4792]: I1202 19:18:35.902840 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zmzjc" event={"ID":"c5ef3f19-9535-4a40-b56d-8fa0e94a2158","Type":"ContainerStarted","Data":"1cb1ea1f2e6537b6f8970675a42cf32c6f1305f473a52b5a0ef8db8a644a00a3"}
Dec 02 19:18:35 crc kubenswrapper[4792]: I1202 19:18:35.949007 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-zmzjc" podStartSLOduration=3.368765487 podStartE2EDuration="5.948977919s" podCreationTimestamp="2025-12-02 19:18:30 +0000 UTC" firstStartedPulling="2025-12-02 19:18:32.85657985 +0000 UTC m=+2543.629472198" lastFinishedPulling="2025-12-02 19:18:35.436792272 +0000 UTC m=+2546.209684630" observedRunningTime="2025-12-02 19:18:35.932092247 +0000 UTC m=+2546.704984605" watchObservedRunningTime="2025-12-02 19:18:35.948977919 +0000 UTC m=+2546.721870287"
Dec 02 19:18:37 crc kubenswrapper[4792]: I1202 19:18:37.540837 4792 scope.go:117] "RemoveContainer" containerID="3c67a741cfc6dd9fe0d54fd77123388dbf880be9adb881936a491df61309de1e"
Dec 02 19:18:37 crc kubenswrapper[4792]: E1202 19:18:37.541471 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f"
Dec 02 19:18:41 crc kubenswrapper[4792]: I1202 19:18:41.255902 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-zmzjc"
Dec 02 19:18:41 crc kubenswrapper[4792]: I1202 19:18:41.256890 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-zmzjc"
Dec 02 19:18:41 crc kubenswrapper[4792]: I1202 19:18:41.332264 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-zmzjc"
Dec 02 19:18:42 crc kubenswrapper[4792]: I1202 19:18:42.069208 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-zmzjc"
Dec 02 19:18:43 crc kubenswrapper[4792]: I1202 19:18:43.711016 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zmzjc"]
Dec 02 19:18:44 crc kubenswrapper[4792]: I1202 19:18:44.016086 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-zmzjc" podUID="c5ef3f19-9535-4a40-b56d-8fa0e94a2158" containerName="registry-server" containerID="cri-o://1cb1ea1f2e6537b6f8970675a42cf32c6f1305f473a52b5a0ef8db8a644a00a3" gracePeriod=2
Dec 02 19:18:44 crc kubenswrapper[4792]: I1202 19:18:44.605420 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zmzjc"
Dec 02 19:18:44 crc kubenswrapper[4792]: I1202 19:18:44.659540 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5ef3f19-9535-4a40-b56d-8fa0e94a2158-utilities\") pod \"c5ef3f19-9535-4a40-b56d-8fa0e94a2158\" (UID: \"c5ef3f19-9535-4a40-b56d-8fa0e94a2158\") "
Dec 02 19:18:44 crc kubenswrapper[4792]: I1202 19:18:44.659734 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w897j\" (UniqueName: \"kubernetes.io/projected/c5ef3f19-9535-4a40-b56d-8fa0e94a2158-kube-api-access-w897j\") pod \"c5ef3f19-9535-4a40-b56d-8fa0e94a2158\" (UID: \"c5ef3f19-9535-4a40-b56d-8fa0e94a2158\") "
Dec 02 19:18:44 crc kubenswrapper[4792]: I1202 19:18:44.659920 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5ef3f19-9535-4a40-b56d-8fa0e94a2158-catalog-content\") pod \"c5ef3f19-9535-4a40-b56d-8fa0e94a2158\" (UID: \"c5ef3f19-9535-4a40-b56d-8fa0e94a2158\") "
Dec 02 19:18:44 crc kubenswrapper[4792]: I1202 19:18:44.660911 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c5ef3f19-9535-4a40-b56d-8fa0e94a2158-utilities" (OuterVolumeSpecName: "utilities") pod "c5ef3f19-9535-4a40-b56d-8fa0e94a2158" (UID: "c5ef3f19-9535-4a40-b56d-8fa0e94a2158"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 19:18:44 crc kubenswrapper[4792]: I1202 19:18:44.670966 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5ef3f19-9535-4a40-b56d-8fa0e94a2158-kube-api-access-w897j" (OuterVolumeSpecName: "kube-api-access-w897j") pod "c5ef3f19-9535-4a40-b56d-8fa0e94a2158" (UID: "c5ef3f19-9535-4a40-b56d-8fa0e94a2158"). InnerVolumeSpecName "kube-api-access-w897j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 19:18:44 crc kubenswrapper[4792]: I1202 19:18:44.729122 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c5ef3f19-9535-4a40-b56d-8fa0e94a2158-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c5ef3f19-9535-4a40-b56d-8fa0e94a2158" (UID: "c5ef3f19-9535-4a40-b56d-8fa0e94a2158"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 19:18:44 crc kubenswrapper[4792]: I1202 19:18:44.762267 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w897j\" (UniqueName: \"kubernetes.io/projected/c5ef3f19-9535-4a40-b56d-8fa0e94a2158-kube-api-access-w897j\") on node \"crc\" DevicePath \"\""
Dec 02 19:18:44 crc kubenswrapper[4792]: I1202 19:18:44.762307 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5ef3f19-9535-4a40-b56d-8fa0e94a2158-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 02 19:18:44 crc kubenswrapper[4792]: I1202 19:18:44.762320 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5ef3f19-9535-4a40-b56d-8fa0e94a2158-utilities\") on node \"crc\" DevicePath \"\""
Dec 02 19:18:45 crc kubenswrapper[4792]: I1202 19:18:45.027213 4792 generic.go:334] "Generic (PLEG): container finished" podID="c5ef3f19-9535-4a40-b56d-8fa0e94a2158" containerID="1cb1ea1f2e6537b6f8970675a42cf32c6f1305f473a52b5a0ef8db8a644a00a3" exitCode=0
Dec 02 19:18:45 crc kubenswrapper[4792]: I1202 19:18:45.027608 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zmzjc" event={"ID":"c5ef3f19-9535-4a40-b56d-8fa0e94a2158","Type":"ContainerDied","Data":"1cb1ea1f2e6537b6f8970675a42cf32c6f1305f473a52b5a0ef8db8a644a00a3"}
Dec 02 19:18:45 crc kubenswrapper[4792]: I1202 19:18:45.027656 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zmzjc"
Dec 02 19:18:45 crc kubenswrapper[4792]: I1202 19:18:45.027921 4792 scope.go:117] "RemoveContainer" containerID="1cb1ea1f2e6537b6f8970675a42cf32c6f1305f473a52b5a0ef8db8a644a00a3"
Dec 02 19:18:45 crc kubenswrapper[4792]: I1202 19:18:45.027856 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zmzjc" event={"ID":"c5ef3f19-9535-4a40-b56d-8fa0e94a2158","Type":"ContainerDied","Data":"13a079550503e8c380fec5fa5c22c56e331a5d1bfac8e27cc2960c438d1e9e7e"}
Dec 02 19:18:45 crc kubenswrapper[4792]: I1202 19:18:45.055898 4792 scope.go:117] "RemoveContainer" containerID="874357106b37ea63bbfe96e221147238a67f4b9f4a2b937cddb3fe358ee97bb8"
Dec 02 19:18:45 crc kubenswrapper[4792]: I1202 19:18:45.067167 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zmzjc"]
Dec 02 19:18:45 crc kubenswrapper[4792]: I1202 19:18:45.078376 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-zmzjc"]
Dec 02 19:18:45 crc kubenswrapper[4792]: I1202 19:18:45.090621 4792 scope.go:117] "RemoveContainer" containerID="2a6d241260041e3795792bd776856a2cf1266a70a279f860f66e9feaac826558"
Dec 02 19:18:45 crc kubenswrapper[4792]: I1202 19:18:45.128260 4792 scope.go:117] "RemoveContainer" containerID="1cb1ea1f2e6537b6f8970675a42cf32c6f1305f473a52b5a0ef8db8a644a00a3"
Dec 02 19:18:45 crc kubenswrapper[4792]: E1202 19:18:45.128773 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1cb1ea1f2e6537b6f8970675a42cf32c6f1305f473a52b5a0ef8db8a644a00a3\": container with ID starting with 1cb1ea1f2e6537b6f8970675a42cf32c6f1305f473a52b5a0ef8db8a644a00a3 not found: ID does not exist" containerID="1cb1ea1f2e6537b6f8970675a42cf32c6f1305f473a52b5a0ef8db8a644a00a3"
Dec 02 19:18:45 crc kubenswrapper[4792]: I1202 19:18:45.128857 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1cb1ea1f2e6537b6f8970675a42cf32c6f1305f473a52b5a0ef8db8a644a00a3"} err="failed to get container status \"1cb1ea1f2e6537b6f8970675a42cf32c6f1305f473a52b5a0ef8db8a644a00a3\": rpc error: code = NotFound desc = could not find container \"1cb1ea1f2e6537b6f8970675a42cf32c6f1305f473a52b5a0ef8db8a644a00a3\": container with ID starting with 1cb1ea1f2e6537b6f8970675a42cf32c6f1305f473a52b5a0ef8db8a644a00a3 not found: ID does not exist"
Dec 02 19:18:45 crc kubenswrapper[4792]: I1202 19:18:45.128908 4792 scope.go:117] "RemoveContainer" containerID="874357106b37ea63bbfe96e221147238a67f4b9f4a2b937cddb3fe358ee97bb8"
Dec 02 19:18:45 crc kubenswrapper[4792]: E1202 19:18:45.129301 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"874357106b37ea63bbfe96e221147238a67f4b9f4a2b937cddb3fe358ee97bb8\": container with ID starting with 874357106b37ea63bbfe96e221147238a67f4b9f4a2b937cddb3fe358ee97bb8 not found: ID does not exist" containerID="874357106b37ea63bbfe96e221147238a67f4b9f4a2b937cddb3fe358ee97bb8"
Dec 02 19:18:45 crc kubenswrapper[4792]: I1202 19:18:45.129348 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"874357106b37ea63bbfe96e221147238a67f4b9f4a2b937cddb3fe358ee97bb8"} err="failed to get container status \"874357106b37ea63bbfe96e221147238a67f4b9f4a2b937cddb3fe358ee97bb8\": rpc error: code = NotFound desc = could not find container \"874357106b37ea63bbfe96e221147238a67f4b9f4a2b937cddb3fe358ee97bb8\": container with ID starting with 874357106b37ea63bbfe96e221147238a67f4b9f4a2b937cddb3fe358ee97bb8 not found: ID does not exist"
Dec 02 19:18:45 crc kubenswrapper[4792]: I1202 19:18:45.129382 4792 scope.go:117] "RemoveContainer" containerID="2a6d241260041e3795792bd776856a2cf1266a70a279f860f66e9feaac826558"
Dec 02 19:18:45 crc kubenswrapper[4792]: E1202 19:18:45.129744 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2a6d241260041e3795792bd776856a2cf1266a70a279f860f66e9feaac826558\": container with ID starting with 2a6d241260041e3795792bd776856a2cf1266a70a279f860f66e9feaac826558 not found: ID does not exist" containerID="2a6d241260041e3795792bd776856a2cf1266a70a279f860f66e9feaac826558"
Dec 02 19:18:45 crc kubenswrapper[4792]: I1202 19:18:45.129786 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a6d241260041e3795792bd776856a2cf1266a70a279f860f66e9feaac826558"} err="failed to get container status \"2a6d241260041e3795792bd776856a2cf1266a70a279f860f66e9feaac826558\": rpc error: code = NotFound desc = could not find container \"2a6d241260041e3795792bd776856a2cf1266a70a279f860f66e9feaac826558\": container with ID starting with 2a6d241260041e3795792bd776856a2cf1266a70a279f860f66e9feaac826558 not found: ID does not exist"
Dec 02 19:18:45 crc kubenswrapper[4792]: I1202 19:18:45.557611 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5ef3f19-9535-4a40-b56d-8fa0e94a2158" path="/var/lib/kubelet/pods/c5ef3f19-9535-4a40-b56d-8fa0e94a2158/volumes"
Dec 02 19:18:48 crc kubenswrapper[4792]: I1202 19:18:48.540813 4792 scope.go:117] "RemoveContainer" containerID="3c67a741cfc6dd9fe0d54fd77123388dbf880be9adb881936a491df61309de1e"
Dec 02 19:18:49 crc kubenswrapper[4792]: I1202 19:18:49.106774 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" event={"ID":"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f","Type":"ContainerStarted","Data":"d20ab7eee014a921ea27d4d4918c14b48ebd9d68391e538f57ba8b85fb451a48"}
Dec 02 19:21:08 crc kubenswrapper[4792]: I1202 19:21:08.086026 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 02 19:21:08 crc kubenswrapper[4792]: I1202 19:21:08.086664 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 02 19:21:21 crc kubenswrapper[4792]: I1202 19:21:21.007977 4792 generic.go:334] "Generic (PLEG): container finished" podID="c0a31132-972a-4e92-b005-de8cacadfe2e" containerID="3c0cd430374399fcbf1a660fcf354ddafd0fe79d17c2ac5dc7af8c0ac7e95665" exitCode=0
Dec 02 19:21:21 crc kubenswrapper[4792]: I1202 19:21:21.008126 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-chd8r" event={"ID":"c0a31132-972a-4e92-b005-de8cacadfe2e","Type":"ContainerDied","Data":"3c0cd430374399fcbf1a660fcf354ddafd0fe79d17c2ac5dc7af8c0ac7e95665"}
Dec 02 19:21:22 crc kubenswrapper[4792]: I1202 19:21:22.534027 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-chd8r"
Dec 02 19:21:22 crc kubenswrapper[4792]: I1202 19:21:22.620780 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c0a31132-972a-4e92-b005-de8cacadfe2e-ssh-key\") pod \"c0a31132-972a-4e92-b005-de8cacadfe2e\" (UID: \"c0a31132-972a-4e92-b005-de8cacadfe2e\") "
Dec 02 19:21:22 crc kubenswrapper[4792]: I1202 19:21:22.622167 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c0a31132-972a-4e92-b005-de8cacadfe2e-inventory\") pod \"c0a31132-972a-4e92-b005-de8cacadfe2e\" (UID: \"c0a31132-972a-4e92-b005-de8cacadfe2e\") "
Dec 02 19:21:22 crc kubenswrapper[4792]: I1202 19:21:22.622198 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/c0a31132-972a-4e92-b005-de8cacadfe2e-libvirt-secret-0\") pod \"c0a31132-972a-4e92-b005-de8cacadfe2e\" (UID: \"c0a31132-972a-4e92-b005-de8cacadfe2e\") "
Dec 02 19:21:22 crc kubenswrapper[4792]: I1202 19:21:22.622639 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w2cch\" (UniqueName: \"kubernetes.io/projected/c0a31132-972a-4e92-b005-de8cacadfe2e-kube-api-access-w2cch\") pod \"c0a31132-972a-4e92-b005-de8cacadfe2e\" (UID: \"c0a31132-972a-4e92-b005-de8cacadfe2e\") "
Dec 02 19:21:22 crc kubenswrapper[4792]: I1202 19:21:22.622776 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0a31132-972a-4e92-b005-de8cacadfe2e-libvirt-combined-ca-bundle\") pod \"c0a31132-972a-4e92-b005-de8cacadfe2e\" (UID: \"c0a31132-972a-4e92-b005-de8cacadfe2e\") "
Dec 02 19:21:22 crc kubenswrapper[4792]: I1202 19:21:22.631732 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0a31132-972a-4e92-b005-de8cacadfe2e-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "c0a31132-972a-4e92-b005-de8cacadfe2e" (UID: "c0a31132-972a-4e92-b005-de8cacadfe2e"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 19:21:22 crc kubenswrapper[4792]: I1202 19:21:22.635698 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0a31132-972a-4e92-b005-de8cacadfe2e-kube-api-access-w2cch" (OuterVolumeSpecName: "kube-api-access-w2cch") pod "c0a31132-972a-4e92-b005-de8cacadfe2e" (UID: "c0a31132-972a-4e92-b005-de8cacadfe2e"). InnerVolumeSpecName "kube-api-access-w2cch". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 19:21:22 crc kubenswrapper[4792]: I1202 19:21:22.664545 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0a31132-972a-4e92-b005-de8cacadfe2e-inventory" (OuterVolumeSpecName: "inventory") pod "c0a31132-972a-4e92-b005-de8cacadfe2e" (UID: "c0a31132-972a-4e92-b005-de8cacadfe2e"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 19:21:22 crc kubenswrapper[4792]: I1202 19:21:22.687438 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0a31132-972a-4e92-b005-de8cacadfe2e-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "c0a31132-972a-4e92-b005-de8cacadfe2e" (UID: "c0a31132-972a-4e92-b005-de8cacadfe2e"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 19:21:22 crc kubenswrapper[4792]: I1202 19:21:22.725899 4792 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0a31132-972a-4e92-b005-de8cacadfe2e-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 02 19:21:22 crc kubenswrapper[4792]: I1202 19:21:22.725935 4792 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c0a31132-972a-4e92-b005-de8cacadfe2e-inventory\") on node \"crc\" DevicePath \"\""
Dec 02 19:21:22 crc kubenswrapper[4792]: I1202 19:21:22.725945 4792 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/c0a31132-972a-4e92-b005-de8cacadfe2e-libvirt-secret-0\") on node \"crc\" DevicePath \"\""
Dec 02 19:21:22 crc kubenswrapper[4792]: I1202 19:21:22.725957 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w2cch\" (UniqueName: \"kubernetes.io/projected/c0a31132-972a-4e92-b005-de8cacadfe2e-kube-api-access-w2cch\") on node \"crc\" DevicePath \"\""
Dec 02 19:21:22 crc kubenswrapper[4792]: I1202 19:21:22.772744 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0a31132-972a-4e92-b005-de8cacadfe2e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c0a31132-972a-4e92-b005-de8cacadfe2e" (UID: "c0a31132-972a-4e92-b005-de8cacadfe2e"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 19:21:22 crc kubenswrapper[4792]: I1202 19:21:22.831751 4792 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c0a31132-972a-4e92-b005-de8cacadfe2e-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.030595 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-chd8r" event={"ID":"c0a31132-972a-4e92-b005-de8cacadfe2e","Type":"ContainerDied","Data":"f22af1972163034b33fe26eedd694ddfcf4c49f77172892c0bcf395a9de3de6f"}
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.030644 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f22af1972163034b33fe26eedd694ddfcf4c49f77172892c0bcf395a9de3de6f"
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.030652 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-chd8r"
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.157952 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-bsnvd"]
Dec 02 19:21:23 crc kubenswrapper[4792]: E1202 19:21:23.158409 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5ef3f19-9535-4a40-b56d-8fa0e94a2158" containerName="extract-content"
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.158428 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5ef3f19-9535-4a40-b56d-8fa0e94a2158" containerName="extract-content"
Dec 02 19:21:23 crc kubenswrapper[4792]: E1202 19:21:23.158444 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0a31132-972a-4e92-b005-de8cacadfe2e" containerName="libvirt-edpm-deployment-openstack-edpm-ipam"
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.158450 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0a31132-972a-4e92-b005-de8cacadfe2e" containerName="libvirt-edpm-deployment-openstack-edpm-ipam"
Dec 02 19:21:23 crc kubenswrapper[4792]: E1202 19:21:23.158475 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5ef3f19-9535-4a40-b56d-8fa0e94a2158" containerName="extract-utilities"
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.158481 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5ef3f19-9535-4a40-b56d-8fa0e94a2158" containerName="extract-utilities"
Dec 02 19:21:23 crc kubenswrapper[4792]: E1202 19:21:23.158492 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5ef3f19-9535-4a40-b56d-8fa0e94a2158" containerName="registry-server"
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.158498 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5ef3f19-9535-4a40-b56d-8fa0e94a2158" containerName="registry-server"
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.158743 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5ef3f19-9535-4a40-b56d-8fa0e94a2158" containerName="registry-server"
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.158772 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0a31132-972a-4e92-b005-de8cacadfe2e" containerName="libvirt-edpm-deployment-openstack-edpm-ipam"
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.159724 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bsnvd"
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.161883 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key"
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.162149 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config"
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.162196 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config"
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.163025 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.163034 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zln7c"
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.163041 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.163146 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.169995 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-bsnvd"]
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.239059 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5516b9b1-aeb0-40a1-9eac-5c7799b85132-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bsnvd\" (UID: \"5516b9b1-aeb0-40a1-9eac-5c7799b85132\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bsnvd"
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.239103 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/5516b9b1-aeb0-40a1-9eac-5c7799b85132-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bsnvd\" (UID: \"5516b9b1-aeb0-40a1-9eac-5c7799b85132\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bsnvd"
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.239133 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/5516b9b1-aeb0-40a1-9eac-5c7799b85132-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bsnvd\" (UID: \"5516b9b1-aeb0-40a1-9eac-5c7799b85132\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bsnvd"
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.239153 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/5516b9b1-aeb0-40a1-9eac-5c7799b85132-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bsnvd\" (UID: \"5516b9b1-aeb0-40a1-9eac-5c7799b85132\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bsnvd"
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.239205 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/5516b9b1-aeb0-40a1-9eac-5c7799b85132-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bsnvd\" (UID: \"5516b9b1-aeb0-40a1-9eac-5c7799b85132\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bsnvd"
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.239436 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5516b9b1-aeb0-40a1-9eac-5c7799b85132-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bsnvd\" (UID: \"5516b9b1-aeb0-40a1-9eac-5c7799b85132\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bsnvd"
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.239455 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5516b9b1-aeb0-40a1-9eac-5c7799b85132-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bsnvd\" (UID: \"5516b9b1-aeb0-40a1-9eac-5c7799b85132\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bsnvd"
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.239493 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pgfcj\" (UniqueName: \"kubernetes.io/projected/5516b9b1-aeb0-40a1-9eac-5c7799b85132-kube-api-access-pgfcj\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bsnvd\" (UID: \"5516b9b1-aeb0-40a1-9eac-5c7799b85132\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bsnvd"
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.239509 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/5516b9b1-aeb0-40a1-9eac-5c7799b85132-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bsnvd\" (UID: \"5516b9b1-aeb0-40a1-9eac-5c7799b85132\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bsnvd"
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.341850 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pgfcj\" (UniqueName: \"kubernetes.io/projected/5516b9b1-aeb0-40a1-9eac-5c7799b85132-kube-api-access-pgfcj\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bsnvd\" (UID: \"5516b9b1-aeb0-40a1-9eac-5c7799b85132\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bsnvd"
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.341917 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/5516b9b1-aeb0-40a1-9eac-5c7799b85132-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bsnvd\" (UID: \"5516b9b1-aeb0-40a1-9eac-5c7799b85132\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bsnvd"
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.342029 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5516b9b1-aeb0-40a1-9eac-5c7799b85132-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bsnvd\" (UID: \"5516b9b1-aeb0-40a1-9eac-5c7799b85132\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bsnvd"
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.342062 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/5516b9b1-aeb0-40a1-9eac-5c7799b85132-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bsnvd\" (UID: \"5516b9b1-aeb0-40a1-9eac-5c7799b85132\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bsnvd"
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.342100 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/5516b9b1-aeb0-40a1-9eac-5c7799b85132-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bsnvd\" (UID: \"5516b9b1-aeb0-40a1-9eac-5c7799b85132\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bsnvd"
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.342132 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/5516b9b1-aeb0-40a1-9eac-5c7799b85132-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bsnvd\" (UID: \"5516b9b1-aeb0-40a1-9eac-5c7799b85132\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bsnvd"
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.342230 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/5516b9b1-aeb0-40a1-9eac-5c7799b85132-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bsnvd\" (UID: \"5516b9b1-aeb0-40a1-9eac-5c7799b85132\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bsnvd"
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.342318 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5516b9b1-aeb0-40a1-9eac-5c7799b85132-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bsnvd\" (UID: \"5516b9b1-aeb0-40a1-9eac-5c7799b85132\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bsnvd"
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.342349 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5516b9b1-aeb0-40a1-9eac-5c7799b85132-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bsnvd\" (UID: \"5516b9b1-aeb0-40a1-9eac-5c7799b85132\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bsnvd"
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.344293 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/5516b9b1-aeb0-40a1-9eac-5c7799b85132-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bsnvd\" (UID: \"5516b9b1-aeb0-40a1-9eac-5c7799b85132\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bsnvd"
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.346016 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/5516b9b1-aeb0-40a1-9eac-5c7799b85132-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bsnvd\" (UID: \"5516b9b1-aeb0-40a1-9eac-5c7799b85132\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bsnvd"
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.346062 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/5516b9b1-aeb0-40a1-9eac-5c7799b85132-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bsnvd\" (UID: \"5516b9b1-aeb0-40a1-9eac-5c7799b85132\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bsnvd"
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.346250 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/5516b9b1-aeb0-40a1-9eac-5c7799b85132-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bsnvd\" (UID: \"5516b9b1-aeb0-40a1-9eac-5c7799b85132\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bsnvd"
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.346989 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5516b9b1-aeb0-40a1-9eac-5c7799b85132-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bsnvd\" (UID: \"5516b9b1-aeb0-40a1-9eac-5c7799b85132\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bsnvd"
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.347991 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5516b9b1-aeb0-40a1-9eac-5c7799b85132-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bsnvd\" (UID: \"5516b9b1-aeb0-40a1-9eac-5c7799b85132\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bsnvd"
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.348732 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5516b9b1-aeb0-40a1-9eac-5c7799b85132-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bsnvd\" (UID: \"5516b9b1-aeb0-40a1-9eac-5c7799b85132\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bsnvd"
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.349020 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/5516b9b1-aeb0-40a1-9eac-5c7799b85132-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bsnvd\" (UID: \"5516b9b1-aeb0-40a1-9eac-5c7799b85132\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bsnvd"
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.361363 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pgfcj\" (UniqueName: \"kubernetes.io/projected/5516b9b1-aeb0-40a1-9eac-5c7799b85132-kube-api-access-pgfcj\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bsnvd\" (UID: \"5516b9b1-aeb0-40a1-9eac-5c7799b85132\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bsnvd"
Dec 02 19:21:23 crc kubenswrapper[4792]: I1202 19:21:23.482420 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bsnvd"
Dec 02 19:21:24 crc kubenswrapper[4792]: I1202 19:21:24.068710 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-bsnvd"]
Dec 02 19:21:24 crc kubenswrapper[4792]: I1202 19:21:24.077590 4792 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 02 19:21:25 crc kubenswrapper[4792]: I1202 19:21:25.051293 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bsnvd" event={"ID":"5516b9b1-aeb0-40a1-9eac-5c7799b85132","Type":"ContainerStarted","Data":"7b00e0c9e8cd2977b010f73581ca532c811321b44b2b1ae507a4848b318eeed0"}
Dec 02 19:21:25 crc kubenswrapper[4792]: I1202 19:21:25.051671 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bsnvd" event={"ID":"5516b9b1-aeb0-40a1-9eac-5c7799b85132","Type":"ContainerStarted","Data":"addcabbc03b72df3f5a7dccd81468e8b489be46321553750978d913d8db8e240"}
Dec 02 19:21:25 crc kubenswrapper[4792]: I1202 19:21:25.081955 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bsnvd" podStartSLOduration=1.5196538080000002 podStartE2EDuration="2.081921986s" podCreationTimestamp="2025-12-02 19:21:23 +0000 UTC" firstStartedPulling="2025-12-02 19:21:24.077280169 +0000 UTC m=+2714.850172507" lastFinishedPulling="2025-12-02 19:21:24.639548357 +0000 UTC m=+2715.412440685" observedRunningTime="2025-12-02 19:21:25.068539556 +0000 UTC m=+2715.841431884" watchObservedRunningTime="2025-12-02 19:21:25.081921986 +0000 UTC m=+2715.854814354"
Dec 02 19:21:38 crc kubenswrapper[4792]: I1202 19:21:38.081058 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 02 19:21:38 crc kubenswrapper[4792]: I1202 19:21:38.081833 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 02 19:22:08 crc kubenswrapper[4792]: I1202 19:22:08.081687 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 02 19:22:08 crc kubenswrapper[4792]: I1202 19:22:08.083304 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 02 19:22:08 crc kubenswrapper[4792]: I1202 19:22:08.083708 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4"
Dec 02 19:22:08 crc kubenswrapper[4792]: I1202 19:22:08.084672 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d20ab7eee014a921ea27d4d4918c14b48ebd9d68391e538f57ba8b85fb451a48"} pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 02 19:22:08 crc kubenswrapper[4792]: I1202 19:22:08.084853 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" containerID="cri-o://d20ab7eee014a921ea27d4d4918c14b48ebd9d68391e538f57ba8b85fb451a48" gracePeriod=600
Dec 02 19:22:08 crc kubenswrapper[4792]: I1202 19:22:08.640291 4792 generic.go:334] "Generic (PLEG): container finished" podID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerID="d20ab7eee014a921ea27d4d4918c14b48ebd9d68391e538f57ba8b85fb451a48" exitCode=0
Dec 02 19:22:08 crc kubenswrapper[4792]: I1202 19:22:08.640341 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" event={"ID":"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f","Type":"ContainerDied","Data":"d20ab7eee014a921ea27d4d4918c14b48ebd9d68391e538f57ba8b85fb451a48"}
Dec 02 19:22:08 crc kubenswrapper[4792]: I1202 19:22:08.640602 4792 scope.go:117] "RemoveContainer" containerID="3c67a741cfc6dd9fe0d54fd77123388dbf880be9adb881936a491df61309de1e"
Dec 02 19:22:09 crc kubenswrapper[4792]: I1202 19:22:09.653460 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" event={"ID":"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f","Type":"ContainerStarted","Data":"d10baa4abdd1b48b5e4fd474432bcd290c613ab0dd069024ee83a680d770775a"}
Dec 02 19:24:36 crc kubenswrapper[4792]: I1202 19:24:36.541514 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-ds2h9"]
Dec 02 19:24:36 crc kubenswrapper[4792]: I1202 19:24:36.544700 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ds2h9"
Dec 02 19:24:36 crc kubenswrapper[4792]: I1202 19:24:36.610510 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ds2h9"]
Dec 02 19:24:36 crc kubenswrapper[4792]: I1202 19:24:36.629644 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/337406d5-c0a0-4331-82ee-ba4ad14e55d1-utilities\") pod \"redhat-marketplace-ds2h9\" (UID: \"337406d5-c0a0-4331-82ee-ba4ad14e55d1\") " pod="openshift-marketplace/redhat-marketplace-ds2h9"
Dec 02 19:24:36 crc kubenswrapper[4792]: I1202 19:24:36.629988 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/337406d5-c0a0-4331-82ee-ba4ad14e55d1-catalog-content\") pod \"redhat-marketplace-ds2h9\" (UID: \"337406d5-c0a0-4331-82ee-ba4ad14e55d1\") " pod="openshift-marketplace/redhat-marketplace-ds2h9"
Dec 02 19:24:36 crc kubenswrapper[4792]: I1202 19:24:36.630066 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cvkvm\" (UniqueName: \"kubernetes.io/projected/337406d5-c0a0-4331-82ee-ba4ad14e55d1-kube-api-access-cvkvm\") pod \"redhat-marketplace-ds2h9\" (UID: \"337406d5-c0a0-4331-82ee-ba4ad14e55d1\") " pod="openshift-marketplace/redhat-marketplace-ds2h9"
Dec 02 19:24:36 crc kubenswrapper[4792]: I1202 19:24:36.731858 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cvkvm\" (UniqueName: \"kubernetes.io/projected/337406d5-c0a0-4331-82ee-ba4ad14e55d1-kube-api-access-cvkvm\") pod \"redhat-marketplace-ds2h9\" (UID: \"337406d5-c0a0-4331-82ee-ba4ad14e55d1\") " pod="openshift-marketplace/redhat-marketplace-ds2h9"
Dec 02 19:24:36 crc kubenswrapper[4792]: I1202 19:24:36.732035 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/337406d5-c0a0-4331-82ee-ba4ad14e55d1-utilities\") pod \"redhat-marketplace-ds2h9\" (UID: \"337406d5-c0a0-4331-82ee-ba4ad14e55d1\") " pod="openshift-marketplace/redhat-marketplace-ds2h9"
Dec 02 19:24:36 crc kubenswrapper[4792]: I1202 19:24:36.732084 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/337406d5-c0a0-4331-82ee-ba4ad14e55d1-catalog-content\") pod \"redhat-marketplace-ds2h9\" (UID: \"337406d5-c0a0-4331-82ee-ba4ad14e55d1\") " pod="openshift-marketplace/redhat-marketplace-ds2h9"
Dec 02 19:24:36 crc kubenswrapper[4792]: I1202 19:24:36.732616 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/337406d5-c0a0-4331-82ee-ba4ad14e55d1-catalog-content\") pod \"redhat-marketplace-ds2h9\" (UID: \"337406d5-c0a0-4331-82ee-ba4ad14e55d1\") " pod="openshift-marketplace/redhat-marketplace-ds2h9"
Dec 02 19:24:36 crc kubenswrapper[4792]: I1202 19:24:36.732779 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/337406d5-c0a0-4331-82ee-ba4ad14e55d1-utilities\") pod \"redhat-marketplace-ds2h9\" (UID: \"337406d5-c0a0-4331-82ee-ba4ad14e55d1\") " pod="openshift-marketplace/redhat-marketplace-ds2h9"
Dec 02 19:24:36 crc kubenswrapper[4792]: I1202 19:24:36.759264 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cvkvm\" (UniqueName: \"kubernetes.io/projected/337406d5-c0a0-4331-82ee-ba4ad14e55d1-kube-api-access-cvkvm\") pod \"redhat-marketplace-ds2h9\" (UID: \"337406d5-c0a0-4331-82ee-ba4ad14e55d1\") " pod="openshift-marketplace/redhat-marketplace-ds2h9"
Dec 02 19:24:36 crc kubenswrapper[4792]: I1202 19:24:36.877837 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ds2h9"
Dec 02 19:24:37 crc kubenswrapper[4792]: I1202 19:24:37.386411 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ds2h9"]
Dec 02 19:24:37 crc kubenswrapper[4792]: I1202 19:24:37.912902 4792 generic.go:334] "Generic (PLEG): container finished" podID="337406d5-c0a0-4331-82ee-ba4ad14e55d1" containerID="d60aa1fe16870ebe99a9b81543c8f0a3721a9c208f3d8a40efd0d6504fbbf422" exitCode=0
Dec 02 19:24:37 crc kubenswrapper[4792]: I1202 19:24:37.913079 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ds2h9" event={"ID":"337406d5-c0a0-4331-82ee-ba4ad14e55d1","Type":"ContainerDied","Data":"d60aa1fe16870ebe99a9b81543c8f0a3721a9c208f3d8a40efd0d6504fbbf422"}
Dec 02 19:24:37 crc kubenswrapper[4792]: I1202 19:24:37.913233 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ds2h9" event={"ID":"337406d5-c0a0-4331-82ee-ba4ad14e55d1","Type":"ContainerStarted","Data":"da247dd825c6fa72bc6f28ef41775ab33a37a7f78a6dc212f57a3f294d48289f"}
Dec 02 19:24:38 crc kubenswrapper[4792]: I1202 19:24:38.081091 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 02 19:24:38 crc kubenswrapper[4792]: I1202 19:24:38.081154 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 02 19:24:39 crc kubenswrapper[4792]: I1202 19:24:39.145788 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-h7wbw"]
Dec 02 19:24:39 crc kubenswrapper[4792]: I1202 19:24:39.149966 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-h7wbw"
Dec 02 19:24:39 crc kubenswrapper[4792]: I1202 19:24:39.187602 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-h7wbw"]
Dec 02 19:24:39 crc kubenswrapper[4792]: I1202 19:24:39.303043 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-khk7f\" (UniqueName: \"kubernetes.io/projected/462f8c01-a31a-4ee7-bf5b-c9ddd66be850-kube-api-access-khk7f\") pod \"certified-operators-h7wbw\" (UID: \"462f8c01-a31a-4ee7-bf5b-c9ddd66be850\") " pod="openshift-marketplace/certified-operators-h7wbw"
Dec 02 19:24:39 crc kubenswrapper[4792]: I1202 19:24:39.303148 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/462f8c01-a31a-4ee7-bf5b-c9ddd66be850-utilities\") pod \"certified-operators-h7wbw\" (UID: \"462f8c01-a31a-4ee7-bf5b-c9ddd66be850\") " pod="openshift-marketplace/certified-operators-h7wbw"
Dec 02 19:24:39 crc kubenswrapper[4792]: I1202 19:24:39.303238 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/462f8c01-a31a-4ee7-bf5b-c9ddd66be850-catalog-content\") pod \"certified-operators-h7wbw\" (UID: \"462f8c01-a31a-4ee7-bf5b-c9ddd66be850\") " pod="openshift-marketplace/certified-operators-h7wbw"
Dec 02 19:24:39 crc kubenswrapper[4792]: I1202 19:24:39.405436 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-khk7f\" (UniqueName: \"kubernetes.io/projected/462f8c01-a31a-4ee7-bf5b-c9ddd66be850-kube-api-access-khk7f\") pod \"certified-operators-h7wbw\" (UID: \"462f8c01-a31a-4ee7-bf5b-c9ddd66be850\") " pod="openshift-marketplace/certified-operators-h7wbw"
Dec 02 19:24:39 crc kubenswrapper[4792]: I1202 19:24:39.405603 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/462f8c01-a31a-4ee7-bf5b-c9ddd66be850-utilities\") pod \"certified-operators-h7wbw\" (UID: \"462f8c01-a31a-4ee7-bf5b-c9ddd66be850\") " pod="openshift-marketplace/certified-operators-h7wbw"
Dec 02 19:24:39 crc kubenswrapper[4792]: I1202 19:24:39.405744 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/462f8c01-a31a-4ee7-bf5b-c9ddd66be850-catalog-content\") pod \"certified-operators-h7wbw\" (UID: \"462f8c01-a31a-4ee7-bf5b-c9ddd66be850\") " pod="openshift-marketplace/certified-operators-h7wbw"
Dec 02 19:24:39 crc kubenswrapper[4792]: I1202 19:24:39.406253 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/462f8c01-a31a-4ee7-bf5b-c9ddd66be850-utilities\") pod \"certified-operators-h7wbw\" (UID: \"462f8c01-a31a-4ee7-bf5b-c9ddd66be850\") " pod="openshift-marketplace/certified-operators-h7wbw"
Dec 02 19:24:39 crc kubenswrapper[4792]: I1202 19:24:39.406274 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/462f8c01-a31a-4ee7-bf5b-c9ddd66be850-catalog-content\") pod \"certified-operators-h7wbw\" (UID: \"462f8c01-a31a-4ee7-bf5b-c9ddd66be850\") " pod="openshift-marketplace/certified-operators-h7wbw"
Dec 02 19:24:39 crc kubenswrapper[4792]: I1202 19:24:39.434581 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-khk7f\" (UniqueName: \"kubernetes.io/projected/462f8c01-a31a-4ee7-bf5b-c9ddd66be850-kube-api-access-khk7f\") pod \"certified-operators-h7wbw\" (UID: \"462f8c01-a31a-4ee7-bf5b-c9ddd66be850\") " pod="openshift-marketplace/certified-operators-h7wbw"
Dec 02 19:24:39 crc kubenswrapper[4792]: I1202 19:24:39.493272 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-h7wbw"
Dec 02 19:24:39 crc kubenswrapper[4792]: I1202 19:24:39.978686 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-h7wbw"]
Dec 02 19:24:40 crc kubenswrapper[4792]: I1202 19:24:40.947117 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h7wbw" event={"ID":"462f8c01-a31a-4ee7-bf5b-c9ddd66be850","Type":"ContainerStarted","Data":"d8e47a7e16e116d7a05870a835060bb86c901200892dbb4da1db3036f50e3a26"}
Dec 02 19:24:40 crc kubenswrapper[4792]: I1202 19:24:40.947789 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h7wbw" event={"ID":"462f8c01-a31a-4ee7-bf5b-c9ddd66be850","Type":"ContainerStarted","Data":"a40fb6c98229f18384cbf00c196cf77d35ca9c78f2dce4d2904efee0b7580481"}
Dec 02 19:24:40 crc kubenswrapper[4792]: I1202 19:24:40.950947 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ds2h9" event={"ID":"337406d5-c0a0-4331-82ee-ba4ad14e55d1","Type":"ContainerStarted","Data":"ed3ef9f8e3dae09d6d7710e3ca1b6c1eb34b6e05c1ba65f32c7bb59a6bbc724f"}
Dec 02 19:24:41 crc kubenswrapper[4792]: I1202 19:24:41.968811 4792 generic.go:334] "Generic (PLEG): container finished" podID="462f8c01-a31a-4ee7-bf5b-c9ddd66be850" containerID="d8e47a7e16e116d7a05870a835060bb86c901200892dbb4da1db3036f50e3a26" exitCode=0
Dec 02 19:24:41 crc kubenswrapper[4792]: I1202 19:24:41.968904 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h7wbw" event={"ID":"462f8c01-a31a-4ee7-bf5b-c9ddd66be850","Type":"ContainerDied","Data":"d8e47a7e16e116d7a05870a835060bb86c901200892dbb4da1db3036f50e3a26"}
Dec 02 19:24:41 crc kubenswrapper[4792]: I1202 19:24:41.972229 4792 generic.go:334] "Generic (PLEG): container finished" podID="337406d5-c0a0-4331-82ee-ba4ad14e55d1" containerID="ed3ef9f8e3dae09d6d7710e3ca1b6c1eb34b6e05c1ba65f32c7bb59a6bbc724f" exitCode=0
Dec 02 19:24:41 crc kubenswrapper[4792]: I1202 19:24:41.972326 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ds2h9" event={"ID":"337406d5-c0a0-4331-82ee-ba4ad14e55d1","Type":"ContainerDied","Data":"ed3ef9f8e3dae09d6d7710e3ca1b6c1eb34b6e05c1ba65f32c7bb59a6bbc724f"}
Dec 02 19:24:43 crc kubenswrapper[4792]: I1202 19:24:43.004123 4792 generic.go:334] "Generic (PLEG): container finished" podID="5516b9b1-aeb0-40a1-9eac-5c7799b85132" containerID="7b00e0c9e8cd2977b010f73581ca532c811321b44b2b1ae507a4848b318eeed0" exitCode=0
Dec 02 19:24:43 crc kubenswrapper[4792]: I1202 19:24:43.004329 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bsnvd" event={"ID":"5516b9b1-aeb0-40a1-9eac-5c7799b85132","Type":"ContainerDied","Data":"7b00e0c9e8cd2977b010f73581ca532c811321b44b2b1ae507a4848b318eeed0"}
Dec 02 19:24:44 crc kubenswrapper[4792]: I1202 19:24:44.018263 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ds2h9" event={"ID":"337406d5-c0a0-4331-82ee-ba4ad14e55d1","Type":"ContainerStarted","Data":"7ac56a47a70eeda95d1fcef521c83db573e3962d6c9742814fb714d4b7d48100"}
Dec 02 19:24:44 crc kubenswrapper[4792]: I1202 19:24:44.021234 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h7wbw" event={"ID":"462f8c01-a31a-4ee7-bf5b-c9ddd66be850","Type":"ContainerStarted","Data":"ac573ee86f7966d0cc7ac19874e4d4d1884b2f38cd71a131bd55b4e34ee92701"}
Dec 02 19:24:44 crc kubenswrapper[4792]: I1202 19:24:44.045074 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-ds2h9" podStartSLOduration=3.08927368 podStartE2EDuration="8.045054429s" podCreationTimestamp="2025-12-02 19:24:36 +0000 UTC" firstStartedPulling="2025-12-02 19:24:37.917267546 +0000 UTC m=+2908.690159884" lastFinishedPulling="2025-12-02 19:24:42.873048295 +0000 UTC m=+2913.645940633" observedRunningTime="2025-12-02 19:24:44.043445227 +0000 UTC m=+2914.816337575" watchObservedRunningTime="2025-12-02 19:24:44.045054429 +0000 UTC m=+2914.817946767"
Dec 02 19:24:44 crc kubenswrapper[4792]: I1202 19:24:44.735926 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bsnvd"
Dec 02 19:24:44 crc kubenswrapper[4792]: I1202 19:24:44.829649 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/5516b9b1-aeb0-40a1-9eac-5c7799b85132-nova-extra-config-0\") pod \"5516b9b1-aeb0-40a1-9eac-5c7799b85132\" (UID: \"5516b9b1-aeb0-40a1-9eac-5c7799b85132\") "
Dec 02 19:24:44 crc kubenswrapper[4792]: I1202 19:24:44.829712 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pgfcj\" (UniqueName: \"kubernetes.io/projected/5516b9b1-aeb0-40a1-9eac-5c7799b85132-kube-api-access-pgfcj\") pod \"5516b9b1-aeb0-40a1-9eac-5c7799b85132\" (UID: \"5516b9b1-aeb0-40a1-9eac-5c7799b85132\") "
Dec 02 19:24:44 crc kubenswrapper[4792]: I1202 19:24:44.829772 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5516b9b1-aeb0-40a1-9eac-5c7799b85132-ssh-key\") pod \"5516b9b1-aeb0-40a1-9eac-5c7799b85132\" (UID: \"5516b9b1-aeb0-40a1-9eac-5c7799b85132\") "
Dec 02 19:24:44 crc kubenswrapper[4792]: I1202 19:24:44.829867 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5516b9b1-aeb0-40a1-9eac-5c7799b85132-inventory\") pod \"5516b9b1-aeb0-40a1-9eac-5c7799b85132\" (UID: \"5516b9b1-aeb0-40a1-9eac-5c7799b85132\") "
Dec 02 19:24:44 crc kubenswrapper[4792]: I1202 19:24:44.829974 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/5516b9b1-aeb0-40a1-9eac-5c7799b85132-nova-migration-ssh-key-0\") pod \"5516b9b1-aeb0-40a1-9eac-5c7799b85132\" (UID: \"5516b9b1-aeb0-40a1-9eac-5c7799b85132\") "
Dec 02 19:24:44 crc kubenswrapper[4792]: I1202 19:24:44.830173 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/5516b9b1-aeb0-40a1-9eac-5c7799b85132-nova-migration-ssh-key-1\") pod \"5516b9b1-aeb0-40a1-9eac-5c7799b85132\" (UID: \"5516b9b1-aeb0-40a1-9eac-5c7799b85132\") "
Dec 02 19:24:44 crc kubenswrapper[4792]: I1202 19:24:44.830246 4792
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/5516b9b1-aeb0-40a1-9eac-5c7799b85132-nova-cell1-compute-config-1\") pod \"5516b9b1-aeb0-40a1-9eac-5c7799b85132\" (UID: \"5516b9b1-aeb0-40a1-9eac-5c7799b85132\") " Dec 02 19:24:44 crc kubenswrapper[4792]: I1202 19:24:44.830322 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/5516b9b1-aeb0-40a1-9eac-5c7799b85132-nova-cell1-compute-config-0\") pod \"5516b9b1-aeb0-40a1-9eac-5c7799b85132\" (UID: \"5516b9b1-aeb0-40a1-9eac-5c7799b85132\") " Dec 02 19:24:44 crc kubenswrapper[4792]: I1202 19:24:44.830357 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5516b9b1-aeb0-40a1-9eac-5c7799b85132-nova-combined-ca-bundle\") pod \"5516b9b1-aeb0-40a1-9eac-5c7799b85132\" (UID: \"5516b9b1-aeb0-40a1-9eac-5c7799b85132\") " Dec 02 19:24:44 crc kubenswrapper[4792]: I1202 19:24:44.835785 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5516b9b1-aeb0-40a1-9eac-5c7799b85132-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "5516b9b1-aeb0-40a1-9eac-5c7799b85132" (UID: "5516b9b1-aeb0-40a1-9eac-5c7799b85132"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:24:44 crc kubenswrapper[4792]: I1202 19:24:44.837744 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5516b9b1-aeb0-40a1-9eac-5c7799b85132-kube-api-access-pgfcj" (OuterVolumeSpecName: "kube-api-access-pgfcj") pod "5516b9b1-aeb0-40a1-9eac-5c7799b85132" (UID: "5516b9b1-aeb0-40a1-9eac-5c7799b85132"). InnerVolumeSpecName "kube-api-access-pgfcj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:24:44 crc kubenswrapper[4792]: I1202 19:24:44.862659 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5516b9b1-aeb0-40a1-9eac-5c7799b85132-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "5516b9b1-aeb0-40a1-9eac-5c7799b85132" (UID: "5516b9b1-aeb0-40a1-9eac-5c7799b85132"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:24:44 crc kubenswrapper[4792]: I1202 19:24:44.868957 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5516b9b1-aeb0-40a1-9eac-5c7799b85132-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "5516b9b1-aeb0-40a1-9eac-5c7799b85132" (UID: "5516b9b1-aeb0-40a1-9eac-5c7799b85132"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:24:44 crc kubenswrapper[4792]: I1202 19:24:44.871623 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5516b9b1-aeb0-40a1-9eac-5c7799b85132-inventory" (OuterVolumeSpecName: "inventory") pod "5516b9b1-aeb0-40a1-9eac-5c7799b85132" (UID: "5516b9b1-aeb0-40a1-9eac-5c7799b85132"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:24:44 crc kubenswrapper[4792]: I1202 19:24:44.872323 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5516b9b1-aeb0-40a1-9eac-5c7799b85132-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "5516b9b1-aeb0-40a1-9eac-5c7799b85132" (UID: "5516b9b1-aeb0-40a1-9eac-5c7799b85132"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:24:44 crc kubenswrapper[4792]: I1202 19:24:44.877442 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5516b9b1-aeb0-40a1-9eac-5c7799b85132-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5516b9b1-aeb0-40a1-9eac-5c7799b85132" (UID: "5516b9b1-aeb0-40a1-9eac-5c7799b85132"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:24:44 crc kubenswrapper[4792]: I1202 19:24:44.878507 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5516b9b1-aeb0-40a1-9eac-5c7799b85132-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "5516b9b1-aeb0-40a1-9eac-5c7799b85132" (UID: "5516b9b1-aeb0-40a1-9eac-5c7799b85132"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 19:24:44 crc kubenswrapper[4792]: I1202 19:24:44.891958 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5516b9b1-aeb0-40a1-9eac-5c7799b85132-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "5516b9b1-aeb0-40a1-9eac-5c7799b85132" (UID: "5516b9b1-aeb0-40a1-9eac-5c7799b85132"). InnerVolumeSpecName "nova-cell1-compute-config-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:24:44 crc kubenswrapper[4792]: I1202 19:24:44.932315 4792 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/5516b9b1-aeb0-40a1-9eac-5c7799b85132-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Dec 02 19:24:44 crc kubenswrapper[4792]: I1202 19:24:44.932344 4792 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/5516b9b1-aeb0-40a1-9eac-5c7799b85132-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Dec 02 19:24:44 crc kubenswrapper[4792]: I1202 19:24:44.932353 4792 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/5516b9b1-aeb0-40a1-9eac-5c7799b85132-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Dec 02 19:24:44 crc kubenswrapper[4792]: I1202 19:24:44.932362 4792 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5516b9b1-aeb0-40a1-9eac-5c7799b85132-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 19:24:44 crc kubenswrapper[4792]: I1202 19:24:44.932371 4792 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/5516b9b1-aeb0-40a1-9eac-5c7799b85132-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Dec 02 19:24:44 crc kubenswrapper[4792]: I1202 19:24:44.932380 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pgfcj\" (UniqueName: \"kubernetes.io/projected/5516b9b1-aeb0-40a1-9eac-5c7799b85132-kube-api-access-pgfcj\") on node \"crc\" DevicePath \"\"" Dec 02 19:24:44 crc kubenswrapper[4792]: I1202 19:24:44.932388 4792 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5516b9b1-aeb0-40a1-9eac-5c7799b85132-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 19:24:44 crc kubenswrapper[4792]: I1202 19:24:44.932397 4792 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5516b9b1-aeb0-40a1-9eac-5c7799b85132-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 19:24:44 crc kubenswrapper[4792]: I1202 19:24:44.932406 4792 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/5516b9b1-aeb0-40a1-9eac-5c7799b85132-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Dec 02 19:24:45 crc kubenswrapper[4792]: I1202 19:24:45.051043 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bsnvd" event={"ID":"5516b9b1-aeb0-40a1-9eac-5c7799b85132","Type":"ContainerDied","Data":"addcabbc03b72df3f5a7dccd81468e8b489be46321553750978d913d8db8e240"} Dec 02 19:24:45 crc kubenswrapper[4792]: I1202 19:24:45.051101 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="addcabbc03b72df3f5a7dccd81468e8b489be46321553750978d913d8db8e240" Dec 02 19:24:45 crc kubenswrapper[4792]: I1202 19:24:45.051273 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bsnvd" Dec 02 19:24:45 crc kubenswrapper[4792]: I1202 19:24:45.157739 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4"] Dec 02 19:24:45 crc kubenswrapper[4792]: E1202 19:24:45.158335 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5516b9b1-aeb0-40a1-9eac-5c7799b85132" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 02 19:24:45 crc kubenswrapper[4792]: I1202 19:24:45.158364 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="5516b9b1-aeb0-40a1-9eac-5c7799b85132" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 02 19:24:45 crc kubenswrapper[4792]: I1202 19:24:45.158690 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="5516b9b1-aeb0-40a1-9eac-5c7799b85132" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 02 19:24:45 crc kubenswrapper[4792]: I1202 19:24:45.159676 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4" Dec 02 19:24:45 crc kubenswrapper[4792]: I1202 19:24:45.163558 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 19:24:45 crc kubenswrapper[4792]: I1202 19:24:45.163783 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Dec 02 19:24:45 crc kubenswrapper[4792]: I1202 19:24:45.166698 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zln7c" Dec 02 19:24:45 crc kubenswrapper[4792]: I1202 19:24:45.167369 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 19:24:45 crc kubenswrapper[4792]: I1202 19:24:45.167722 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 19:24:45 crc kubenswrapper[4792]: I1202 19:24:45.176278 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4"] Dec 02 19:24:45 crc kubenswrapper[4792]: I1202 19:24:45.239106 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/6aeeaf00-b476-4d91-a807-92fb47391287-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4\" (UID: \"6aeeaf00-b476-4d91-a807-92fb47391287\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4" Dec 02 19:24:45 crc kubenswrapper[4792]: I1202 19:24:45.239253 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/6aeeaf00-b476-4d91-a807-92fb47391287-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4\" (UID: \"6aeeaf00-b476-4d91-a807-92fb47391287\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4" Dec 02 19:24:45 crc kubenswrapper[4792]: I1202 19:24:45.239295 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7zpxp\" (UniqueName: \"kubernetes.io/projected/6aeeaf00-b476-4d91-a807-92fb47391287-kube-api-access-7zpxp\") pod 
\"telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4\" (UID: \"6aeeaf00-b476-4d91-a807-92fb47391287\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4" Dec 02 19:24:45 crc kubenswrapper[4792]: I1202 19:24:45.239344 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6aeeaf00-b476-4d91-a807-92fb47391287-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4\" (UID: \"6aeeaf00-b476-4d91-a807-92fb47391287\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4" Dec 02 19:24:45 crc kubenswrapper[4792]: I1202 19:24:45.239365 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/6aeeaf00-b476-4d91-a807-92fb47391287-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4\" (UID: \"6aeeaf00-b476-4d91-a807-92fb47391287\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4" Dec 02 19:24:45 crc kubenswrapper[4792]: I1202 19:24:45.239587 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6aeeaf00-b476-4d91-a807-92fb47391287-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4\" (UID: \"6aeeaf00-b476-4d91-a807-92fb47391287\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4" Dec 02 19:24:45 crc kubenswrapper[4792]: I1202 19:24:45.239739 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6aeeaf00-b476-4d91-a807-92fb47391287-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4\" (UID: \"6aeeaf00-b476-4d91-a807-92fb47391287\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4" Dec 02 19:24:45 crc kubenswrapper[4792]: I1202 19:24:45.342063 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/6aeeaf00-b476-4d91-a807-92fb47391287-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4\" (UID: \"6aeeaf00-b476-4d91-a807-92fb47391287\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4" Dec 02 19:24:45 crc kubenswrapper[4792]: I1202 19:24:45.342136 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7zpxp\" (UniqueName: \"kubernetes.io/projected/6aeeaf00-b476-4d91-a807-92fb47391287-kube-api-access-7zpxp\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4\" (UID: \"6aeeaf00-b476-4d91-a807-92fb47391287\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4" Dec 02 19:24:45 crc kubenswrapper[4792]: I1202 19:24:45.342213 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6aeeaf00-b476-4d91-a807-92fb47391287-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4\" (UID: \"6aeeaf00-b476-4d91-a807-92fb47391287\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4" Dec 02 19:24:45 crc kubenswrapper[4792]: I1202 19:24:45.342244 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: 
\"kubernetes.io/secret/6aeeaf00-b476-4d91-a807-92fb47391287-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4\" (UID: \"6aeeaf00-b476-4d91-a807-92fb47391287\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4" Dec 02 19:24:45 crc kubenswrapper[4792]: I1202 19:24:45.342291 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6aeeaf00-b476-4d91-a807-92fb47391287-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4\" (UID: \"6aeeaf00-b476-4d91-a807-92fb47391287\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4" Dec 02 19:24:45 crc kubenswrapper[4792]: I1202 19:24:45.342341 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6aeeaf00-b476-4d91-a807-92fb47391287-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4\" (UID: \"6aeeaf00-b476-4d91-a807-92fb47391287\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4" Dec 02 19:24:45 crc kubenswrapper[4792]: I1202 19:24:45.342384 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/6aeeaf00-b476-4d91-a807-92fb47391287-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4\" (UID: \"6aeeaf00-b476-4d91-a807-92fb47391287\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4" Dec 02 19:24:45 crc kubenswrapper[4792]: I1202 19:24:45.347535 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/6aeeaf00-b476-4d91-a807-92fb47391287-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4\" (UID: \"6aeeaf00-b476-4d91-a807-92fb47391287\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4" Dec 02 19:24:45 crc kubenswrapper[4792]: I1202 19:24:45.348032 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6aeeaf00-b476-4d91-a807-92fb47391287-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4\" (UID: \"6aeeaf00-b476-4d91-a807-92fb47391287\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4" Dec 02 19:24:45 crc kubenswrapper[4792]: I1202 19:24:45.348207 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6aeeaf00-b476-4d91-a807-92fb47391287-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4\" (UID: \"6aeeaf00-b476-4d91-a807-92fb47391287\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4" Dec 02 19:24:45 crc kubenswrapper[4792]: I1202 19:24:45.349718 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6aeeaf00-b476-4d91-a807-92fb47391287-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4\" (UID: \"6aeeaf00-b476-4d91-a807-92fb47391287\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4" Dec 02 19:24:45 crc kubenswrapper[4792]: I1202 19:24:45.349908 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: 
\"kubernetes.io/secret/6aeeaf00-b476-4d91-a807-92fb47391287-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4\" (UID: \"6aeeaf00-b476-4d91-a807-92fb47391287\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4" Dec 02 19:24:45 crc kubenswrapper[4792]: I1202 19:24:45.351483 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/6aeeaf00-b476-4d91-a807-92fb47391287-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4\" (UID: \"6aeeaf00-b476-4d91-a807-92fb47391287\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4" Dec 02 19:24:45 crc kubenswrapper[4792]: I1202 19:24:45.365820 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7zpxp\" (UniqueName: \"kubernetes.io/projected/6aeeaf00-b476-4d91-a807-92fb47391287-kube-api-access-7zpxp\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4\" (UID: \"6aeeaf00-b476-4d91-a807-92fb47391287\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4" Dec 02 19:24:45 crc kubenswrapper[4792]: I1202 19:24:45.483773 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4" Dec 02 19:24:46 crc kubenswrapper[4792]: I1202 19:24:46.111128 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4"] Dec 02 19:24:46 crc kubenswrapper[4792]: W1202 19:24:46.112025 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6aeeaf00_b476_4d91_a807_92fb47391287.slice/crio-2745b3df3a16acff082c67f7bfd4f9973b0fc8c32d7d5d23f5adfa9cde305470 WatchSource:0}: Error finding container 2745b3df3a16acff082c67f7bfd4f9973b0fc8c32d7d5d23f5adfa9cde305470: Status 404 returned error can't find the container with id 2745b3df3a16acff082c67f7bfd4f9973b0fc8c32d7d5d23f5adfa9cde305470 Dec 02 19:24:46 crc kubenswrapper[4792]: I1202 19:24:46.878198 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-ds2h9" Dec 02 19:24:46 crc kubenswrapper[4792]: I1202 19:24:46.879045 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-ds2h9" Dec 02 19:24:46 crc kubenswrapper[4792]: I1202 19:24:46.930788 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-ds2h9" Dec 02 19:24:47 crc kubenswrapper[4792]: I1202 19:24:47.075054 4792 generic.go:334] "Generic (PLEG): container finished" podID="462f8c01-a31a-4ee7-bf5b-c9ddd66be850" containerID="ac573ee86f7966d0cc7ac19874e4d4d1884b2f38cd71a131bd55b4e34ee92701" exitCode=0 Dec 02 19:24:47 crc kubenswrapper[4792]: I1202 19:24:47.075094 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h7wbw" event={"ID":"462f8c01-a31a-4ee7-bf5b-c9ddd66be850","Type":"ContainerDied","Data":"ac573ee86f7966d0cc7ac19874e4d4d1884b2f38cd71a131bd55b4e34ee92701"} Dec 02 19:24:47 crc kubenswrapper[4792]: I1202 19:24:47.077876 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4" 
event={"ID":"6aeeaf00-b476-4d91-a807-92fb47391287","Type":"ContainerStarted","Data":"2745b3df3a16acff082c67f7bfd4f9973b0fc8c32d7d5d23f5adfa9cde305470"} Dec 02 19:24:50 crc kubenswrapper[4792]: I1202 19:24:50.126868 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h7wbw" event={"ID":"462f8c01-a31a-4ee7-bf5b-c9ddd66be850","Type":"ContainerStarted","Data":"6d239cfeacc6b0933781ba385485f111c0685daef838402959f9b2359ae535ee"} Dec 02 19:24:50 crc kubenswrapper[4792]: I1202 19:24:50.129568 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4" event={"ID":"6aeeaf00-b476-4d91-a807-92fb47391287","Type":"ContainerStarted","Data":"5861a211eae20f2fd0438be806b372d0e23da471e74a5fbee908f50792925310"} Dec 02 19:24:50 crc kubenswrapper[4792]: I1202 19:24:50.176320 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-h7wbw" podStartSLOduration=3.849640031 podStartE2EDuration="11.176302271s" podCreationTimestamp="2025-12-02 19:24:39 +0000 UTC" firstStartedPulling="2025-12-02 19:24:41.9723134 +0000 UTC m=+2912.745205768" lastFinishedPulling="2025-12-02 19:24:49.29897566 +0000 UTC m=+2920.071868008" observedRunningTime="2025-12-02 19:24:50.170978012 +0000 UTC m=+2920.943870380" watchObservedRunningTime="2025-12-02 19:24:50.176302271 +0000 UTC m=+2920.949194599" Dec 02 19:24:50 crc kubenswrapper[4792]: I1202 19:24:50.200189 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4" podStartSLOduration=2.189362807 podStartE2EDuration="5.200170577s" podCreationTimestamp="2025-12-02 19:24:45 +0000 UTC" firstStartedPulling="2025-12-02 19:24:46.118117114 +0000 UTC m=+2916.891009442" lastFinishedPulling="2025-12-02 19:24:49.128924844 +0000 UTC m=+2919.901817212" observedRunningTime="2025-12-02 19:24:50.190921915 +0000 UTC m=+2920.963814253" watchObservedRunningTime="2025-12-02 19:24:50.200170577 +0000 UTC m=+2920.973062915" Dec 02 19:24:56 crc kubenswrapper[4792]: I1202 19:24:56.969042 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-ds2h9" Dec 02 19:24:57 crc kubenswrapper[4792]: I1202 19:24:57.028379 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ds2h9"] Dec 02 19:24:57 crc kubenswrapper[4792]: I1202 19:24:57.217596 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-ds2h9" podUID="337406d5-c0a0-4331-82ee-ba4ad14e55d1" containerName="registry-server" containerID="cri-o://7ac56a47a70eeda95d1fcef521c83db573e3962d6c9742814fb714d4b7d48100" gracePeriod=2 Dec 02 19:24:57 crc kubenswrapper[4792]: I1202 19:24:57.855692 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ds2h9" Dec 02 19:24:57 crc kubenswrapper[4792]: I1202 19:24:57.945292 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/337406d5-c0a0-4331-82ee-ba4ad14e55d1-utilities\") pod \"337406d5-c0a0-4331-82ee-ba4ad14e55d1\" (UID: \"337406d5-c0a0-4331-82ee-ba4ad14e55d1\") " Dec 02 19:24:57 crc kubenswrapper[4792]: I1202 19:24:57.945474 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cvkvm\" (UniqueName: \"kubernetes.io/projected/337406d5-c0a0-4331-82ee-ba4ad14e55d1-kube-api-access-cvkvm\") pod \"337406d5-c0a0-4331-82ee-ba4ad14e55d1\" (UID: \"337406d5-c0a0-4331-82ee-ba4ad14e55d1\") " Dec 02 19:24:57 crc kubenswrapper[4792]: I1202 19:24:57.945657 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/337406d5-c0a0-4331-82ee-ba4ad14e55d1-catalog-content\") pod \"337406d5-c0a0-4331-82ee-ba4ad14e55d1\" (UID: \"337406d5-c0a0-4331-82ee-ba4ad14e55d1\") " Dec 02 19:24:57 crc kubenswrapper[4792]: I1202 19:24:57.946632 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/337406d5-c0a0-4331-82ee-ba4ad14e55d1-utilities" (OuterVolumeSpecName: "utilities") pod "337406d5-c0a0-4331-82ee-ba4ad14e55d1" (UID: "337406d5-c0a0-4331-82ee-ba4ad14e55d1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:24:57 crc kubenswrapper[4792]: I1202 19:24:57.961302 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/337406d5-c0a0-4331-82ee-ba4ad14e55d1-kube-api-access-cvkvm" (OuterVolumeSpecName: "kube-api-access-cvkvm") pod "337406d5-c0a0-4331-82ee-ba4ad14e55d1" (UID: "337406d5-c0a0-4331-82ee-ba4ad14e55d1"). InnerVolumeSpecName "kube-api-access-cvkvm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:24:57 crc kubenswrapper[4792]: I1202 19:24:57.987046 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/337406d5-c0a0-4331-82ee-ba4ad14e55d1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "337406d5-c0a0-4331-82ee-ba4ad14e55d1" (UID: "337406d5-c0a0-4331-82ee-ba4ad14e55d1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:24:58 crc kubenswrapper[4792]: I1202 19:24:58.048796 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/337406d5-c0a0-4331-82ee-ba4ad14e55d1-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 19:24:58 crc kubenswrapper[4792]: I1202 19:24:58.049129 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/337406d5-c0a0-4331-82ee-ba4ad14e55d1-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 19:24:58 crc kubenswrapper[4792]: I1202 19:24:58.049221 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cvkvm\" (UniqueName: \"kubernetes.io/projected/337406d5-c0a0-4331-82ee-ba4ad14e55d1-kube-api-access-cvkvm\") on node \"crc\" DevicePath \"\"" Dec 02 19:24:58 crc kubenswrapper[4792]: I1202 19:24:58.230881 4792 generic.go:334] "Generic (PLEG): container finished" podID="337406d5-c0a0-4331-82ee-ba4ad14e55d1" containerID="7ac56a47a70eeda95d1fcef521c83db573e3962d6c9742814fb714d4b7d48100" exitCode=0 Dec 02 19:24:58 crc kubenswrapper[4792]: I1202 19:24:58.230940 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ds2h9" event={"ID":"337406d5-c0a0-4331-82ee-ba4ad14e55d1","Type":"ContainerDied","Data":"7ac56a47a70eeda95d1fcef521c83db573e3962d6c9742814fb714d4b7d48100"} Dec 02 19:24:58 crc kubenswrapper[4792]: I1202 19:24:58.231358 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ds2h9" event={"ID":"337406d5-c0a0-4331-82ee-ba4ad14e55d1","Type":"ContainerDied","Data":"da247dd825c6fa72bc6f28ef41775ab33a37a7f78a6dc212f57a3f294d48289f"} Dec 02 19:24:58 crc kubenswrapper[4792]: I1202 19:24:58.231086 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ds2h9" Dec 02 19:24:58 crc kubenswrapper[4792]: I1202 19:24:58.231413 4792 scope.go:117] "RemoveContainer" containerID="7ac56a47a70eeda95d1fcef521c83db573e3962d6c9742814fb714d4b7d48100" Dec 02 19:24:58 crc kubenswrapper[4792]: I1202 19:24:58.288962 4792 scope.go:117] "RemoveContainer" containerID="ed3ef9f8e3dae09d6d7710e3ca1b6c1eb34b6e05c1ba65f32c7bb59a6bbc724f" Dec 02 19:24:58 crc kubenswrapper[4792]: I1202 19:24:58.297040 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ds2h9"] Dec 02 19:24:58 crc kubenswrapper[4792]: I1202 19:24:58.311360 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-ds2h9"] Dec 02 19:24:58 crc kubenswrapper[4792]: I1202 19:24:58.322610 4792 scope.go:117] "RemoveContainer" containerID="d60aa1fe16870ebe99a9b81543c8f0a3721a9c208f3d8a40efd0d6504fbbf422" Dec 02 19:24:58 crc kubenswrapper[4792]: I1202 19:24:58.385122 4792 scope.go:117] "RemoveContainer" containerID="7ac56a47a70eeda95d1fcef521c83db573e3962d6c9742814fb714d4b7d48100" Dec 02 19:24:58 crc kubenswrapper[4792]: E1202 19:24:58.385843 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ac56a47a70eeda95d1fcef521c83db573e3962d6c9742814fb714d4b7d48100\": container with ID starting with 7ac56a47a70eeda95d1fcef521c83db573e3962d6c9742814fb714d4b7d48100 not found: ID does not exist" containerID="7ac56a47a70eeda95d1fcef521c83db573e3962d6c9742814fb714d4b7d48100" Dec 02 19:24:58 crc kubenswrapper[4792]: I1202 19:24:58.385884 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ac56a47a70eeda95d1fcef521c83db573e3962d6c9742814fb714d4b7d48100"} err="failed to get container status \"7ac56a47a70eeda95d1fcef521c83db573e3962d6c9742814fb714d4b7d48100\": rpc error: code = NotFound desc = could not find container \"7ac56a47a70eeda95d1fcef521c83db573e3962d6c9742814fb714d4b7d48100\": container with ID starting with 7ac56a47a70eeda95d1fcef521c83db573e3962d6c9742814fb714d4b7d48100 not found: ID does not exist" Dec 02 19:24:58 crc kubenswrapper[4792]: I1202 19:24:58.385917 4792 scope.go:117] "RemoveContainer" containerID="ed3ef9f8e3dae09d6d7710e3ca1b6c1eb34b6e05c1ba65f32c7bb59a6bbc724f" Dec 02 19:24:58 crc kubenswrapper[4792]: E1202 19:24:58.386514 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ed3ef9f8e3dae09d6d7710e3ca1b6c1eb34b6e05c1ba65f32c7bb59a6bbc724f\": container with ID starting with ed3ef9f8e3dae09d6d7710e3ca1b6c1eb34b6e05c1ba65f32c7bb59a6bbc724f not found: ID does not exist" containerID="ed3ef9f8e3dae09d6d7710e3ca1b6c1eb34b6e05c1ba65f32c7bb59a6bbc724f" Dec 02 19:24:58 crc kubenswrapper[4792]: I1202 19:24:58.386568 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed3ef9f8e3dae09d6d7710e3ca1b6c1eb34b6e05c1ba65f32c7bb59a6bbc724f"} err="failed to get container status \"ed3ef9f8e3dae09d6d7710e3ca1b6c1eb34b6e05c1ba65f32c7bb59a6bbc724f\": rpc error: code = NotFound desc = could not find container \"ed3ef9f8e3dae09d6d7710e3ca1b6c1eb34b6e05c1ba65f32c7bb59a6bbc724f\": container with ID starting with ed3ef9f8e3dae09d6d7710e3ca1b6c1eb34b6e05c1ba65f32c7bb59a6bbc724f not found: ID does not exist" Dec 02 19:24:58 crc kubenswrapper[4792]: I1202 19:24:58.386599 4792 scope.go:117] "RemoveContainer" 
containerID="d60aa1fe16870ebe99a9b81543c8f0a3721a9c208f3d8a40efd0d6504fbbf422" Dec 02 19:24:58 crc kubenswrapper[4792]: E1202 19:24:58.387205 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d60aa1fe16870ebe99a9b81543c8f0a3721a9c208f3d8a40efd0d6504fbbf422\": container with ID starting with d60aa1fe16870ebe99a9b81543c8f0a3721a9c208f3d8a40efd0d6504fbbf422 not found: ID does not exist" containerID="d60aa1fe16870ebe99a9b81543c8f0a3721a9c208f3d8a40efd0d6504fbbf422" Dec 02 19:24:58 crc kubenswrapper[4792]: I1202 19:24:58.387291 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d60aa1fe16870ebe99a9b81543c8f0a3721a9c208f3d8a40efd0d6504fbbf422"} err="failed to get container status \"d60aa1fe16870ebe99a9b81543c8f0a3721a9c208f3d8a40efd0d6504fbbf422\": rpc error: code = NotFound desc = could not find container \"d60aa1fe16870ebe99a9b81543c8f0a3721a9c208f3d8a40efd0d6504fbbf422\": container with ID starting with d60aa1fe16870ebe99a9b81543c8f0a3721a9c208f3d8a40efd0d6504fbbf422 not found: ID does not exist" Dec 02 19:24:59 crc kubenswrapper[4792]: I1202 19:24:59.494659 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-h7wbw" Dec 02 19:24:59 crc kubenswrapper[4792]: I1202 19:24:59.495061 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-h7wbw" Dec 02 19:24:59 crc kubenswrapper[4792]: I1202 19:24:59.554333 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="337406d5-c0a0-4331-82ee-ba4ad14e55d1" path="/var/lib/kubelet/pods/337406d5-c0a0-4331-82ee-ba4ad14e55d1/volumes" Dec 02 19:24:59 crc kubenswrapper[4792]: I1202 19:24:59.562979 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-h7wbw" Dec 02 19:25:00 crc kubenswrapper[4792]: I1202 19:25:00.298774 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-h7wbw" Dec 02 19:25:00 crc kubenswrapper[4792]: I1202 19:25:00.620626 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-h7wbw"] Dec 02 19:25:02 crc kubenswrapper[4792]: I1202 19:25:02.278915 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-h7wbw" podUID="462f8c01-a31a-4ee7-bf5b-c9ddd66be850" containerName="registry-server" containerID="cri-o://6d239cfeacc6b0933781ba385485f111c0685daef838402959f9b2359ae535ee" gracePeriod=2 Dec 02 19:25:02 crc kubenswrapper[4792]: I1202 19:25:02.910023 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-h7wbw" Dec 02 19:25:02 crc kubenswrapper[4792]: I1202 19:25:02.974537 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-khk7f\" (UniqueName: \"kubernetes.io/projected/462f8c01-a31a-4ee7-bf5b-c9ddd66be850-kube-api-access-khk7f\") pod \"462f8c01-a31a-4ee7-bf5b-c9ddd66be850\" (UID: \"462f8c01-a31a-4ee7-bf5b-c9ddd66be850\") " Dec 02 19:25:02 crc kubenswrapper[4792]: I1202 19:25:02.975098 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/462f8c01-a31a-4ee7-bf5b-c9ddd66be850-catalog-content\") pod \"462f8c01-a31a-4ee7-bf5b-c9ddd66be850\" (UID: \"462f8c01-a31a-4ee7-bf5b-c9ddd66be850\") " Dec 02 19:25:02 crc kubenswrapper[4792]: I1202 19:25:02.975179 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/462f8c01-a31a-4ee7-bf5b-c9ddd66be850-utilities\") pod \"462f8c01-a31a-4ee7-bf5b-c9ddd66be850\" (UID: \"462f8c01-a31a-4ee7-bf5b-c9ddd66be850\") " Dec 02 19:25:02 crc kubenswrapper[4792]: I1202 19:25:02.976647 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/462f8c01-a31a-4ee7-bf5b-c9ddd66be850-utilities" (OuterVolumeSpecName: "utilities") pod "462f8c01-a31a-4ee7-bf5b-c9ddd66be850" (UID: "462f8c01-a31a-4ee7-bf5b-c9ddd66be850"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:25:02 crc kubenswrapper[4792]: I1202 19:25:02.988817 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/462f8c01-a31a-4ee7-bf5b-c9ddd66be850-kube-api-access-khk7f" (OuterVolumeSpecName: "kube-api-access-khk7f") pod "462f8c01-a31a-4ee7-bf5b-c9ddd66be850" (UID: "462f8c01-a31a-4ee7-bf5b-c9ddd66be850"). InnerVolumeSpecName "kube-api-access-khk7f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:25:03 crc kubenswrapper[4792]: I1202 19:25:03.044484 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/462f8c01-a31a-4ee7-bf5b-c9ddd66be850-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "462f8c01-a31a-4ee7-bf5b-c9ddd66be850" (UID: "462f8c01-a31a-4ee7-bf5b-c9ddd66be850"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:25:03 crc kubenswrapper[4792]: I1202 19:25:03.080816 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/462f8c01-a31a-4ee7-bf5b-c9ddd66be850-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 19:25:03 crc kubenswrapper[4792]: I1202 19:25:03.081078 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/462f8c01-a31a-4ee7-bf5b-c9ddd66be850-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 19:25:03 crc kubenswrapper[4792]: I1202 19:25:03.081139 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-khk7f\" (UniqueName: \"kubernetes.io/projected/462f8c01-a31a-4ee7-bf5b-c9ddd66be850-kube-api-access-khk7f\") on node \"crc\" DevicePath \"\"" Dec 02 19:25:03 crc kubenswrapper[4792]: I1202 19:25:03.320810 4792 generic.go:334] "Generic (PLEG): container finished" podID="462f8c01-a31a-4ee7-bf5b-c9ddd66be850" containerID="6d239cfeacc6b0933781ba385485f111c0685daef838402959f9b2359ae535ee" exitCode=0 Dec 02 19:25:03 crc kubenswrapper[4792]: I1202 19:25:03.320861 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h7wbw" event={"ID":"462f8c01-a31a-4ee7-bf5b-c9ddd66be850","Type":"ContainerDied","Data":"6d239cfeacc6b0933781ba385485f111c0685daef838402959f9b2359ae535ee"} Dec 02 19:25:03 crc kubenswrapper[4792]: I1202 19:25:03.320892 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h7wbw" event={"ID":"462f8c01-a31a-4ee7-bf5b-c9ddd66be850","Type":"ContainerDied","Data":"a40fb6c98229f18384cbf00c196cf77d35ca9c78f2dce4d2904efee0b7580481"} Dec 02 19:25:03 crc kubenswrapper[4792]: I1202 19:25:03.320913 4792 scope.go:117] "RemoveContainer" containerID="6d239cfeacc6b0933781ba385485f111c0685daef838402959f9b2359ae535ee" Dec 02 19:25:03 crc kubenswrapper[4792]: I1202 19:25:03.321057 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-h7wbw" Dec 02 19:25:03 crc kubenswrapper[4792]: I1202 19:25:03.361368 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-h7wbw"] Dec 02 19:25:03 crc kubenswrapper[4792]: I1202 19:25:03.367065 4792 scope.go:117] "RemoveContainer" containerID="ac573ee86f7966d0cc7ac19874e4d4d1884b2f38cd71a131bd55b4e34ee92701" Dec 02 19:25:03 crc kubenswrapper[4792]: I1202 19:25:03.378159 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-h7wbw"] Dec 02 19:25:03 crc kubenswrapper[4792]: I1202 19:25:03.394601 4792 scope.go:117] "RemoveContainer" containerID="d8e47a7e16e116d7a05870a835060bb86c901200892dbb4da1db3036f50e3a26" Dec 02 19:25:03 crc kubenswrapper[4792]: I1202 19:25:03.450198 4792 scope.go:117] "RemoveContainer" containerID="6d239cfeacc6b0933781ba385485f111c0685daef838402959f9b2359ae535ee" Dec 02 19:25:03 crc kubenswrapper[4792]: E1202 19:25:03.452362 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d239cfeacc6b0933781ba385485f111c0685daef838402959f9b2359ae535ee\": container with ID starting with 6d239cfeacc6b0933781ba385485f111c0685daef838402959f9b2359ae535ee not found: ID does not exist" containerID="6d239cfeacc6b0933781ba385485f111c0685daef838402959f9b2359ae535ee" Dec 02 19:25:03 crc kubenswrapper[4792]: I1202 19:25:03.452489 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d239cfeacc6b0933781ba385485f111c0685daef838402959f9b2359ae535ee"} err="failed to get container status \"6d239cfeacc6b0933781ba385485f111c0685daef838402959f9b2359ae535ee\": rpc error: code = NotFound desc = could not find container \"6d239cfeacc6b0933781ba385485f111c0685daef838402959f9b2359ae535ee\": container with ID starting with 6d239cfeacc6b0933781ba385485f111c0685daef838402959f9b2359ae535ee not found: ID does not exist" Dec 02 19:25:03 crc kubenswrapper[4792]: I1202 19:25:03.452617 4792 scope.go:117] "RemoveContainer" containerID="ac573ee86f7966d0cc7ac19874e4d4d1884b2f38cd71a131bd55b4e34ee92701" Dec 02 19:25:03 crc kubenswrapper[4792]: E1202 19:25:03.453059 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ac573ee86f7966d0cc7ac19874e4d4d1884b2f38cd71a131bd55b4e34ee92701\": container with ID starting with ac573ee86f7966d0cc7ac19874e4d4d1884b2f38cd71a131bd55b4e34ee92701 not found: ID does not exist" containerID="ac573ee86f7966d0cc7ac19874e4d4d1884b2f38cd71a131bd55b4e34ee92701" Dec 02 19:25:03 crc kubenswrapper[4792]: I1202 19:25:03.453186 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac573ee86f7966d0cc7ac19874e4d4d1884b2f38cd71a131bd55b4e34ee92701"} err="failed to get container status \"ac573ee86f7966d0cc7ac19874e4d4d1884b2f38cd71a131bd55b4e34ee92701\": rpc error: code = NotFound desc = could not find container \"ac573ee86f7966d0cc7ac19874e4d4d1884b2f38cd71a131bd55b4e34ee92701\": container with ID starting with ac573ee86f7966d0cc7ac19874e4d4d1884b2f38cd71a131bd55b4e34ee92701 not found: ID does not exist" Dec 02 19:25:03 crc kubenswrapper[4792]: I1202 19:25:03.453275 4792 scope.go:117] "RemoveContainer" containerID="d8e47a7e16e116d7a05870a835060bb86c901200892dbb4da1db3036f50e3a26" Dec 02 19:25:03 crc kubenswrapper[4792]: E1202 19:25:03.453990 4792 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"d8e47a7e16e116d7a05870a835060bb86c901200892dbb4da1db3036f50e3a26\": container with ID starting with d8e47a7e16e116d7a05870a835060bb86c901200892dbb4da1db3036f50e3a26 not found: ID does not exist" containerID="d8e47a7e16e116d7a05870a835060bb86c901200892dbb4da1db3036f50e3a26" Dec 02 19:25:03 crc kubenswrapper[4792]: I1202 19:25:03.454093 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8e47a7e16e116d7a05870a835060bb86c901200892dbb4da1db3036f50e3a26"} err="failed to get container status \"d8e47a7e16e116d7a05870a835060bb86c901200892dbb4da1db3036f50e3a26\": rpc error: code = NotFound desc = could not find container \"d8e47a7e16e116d7a05870a835060bb86c901200892dbb4da1db3036f50e3a26\": container with ID starting with d8e47a7e16e116d7a05870a835060bb86c901200892dbb4da1db3036f50e3a26 not found: ID does not exist" Dec 02 19:25:03 crc kubenswrapper[4792]: I1202 19:25:03.554029 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="462f8c01-a31a-4ee7-bf5b-c9ddd66be850" path="/var/lib/kubelet/pods/462f8c01-a31a-4ee7-bf5b-c9ddd66be850/volumes" Dec 02 19:25:08 crc kubenswrapper[4792]: I1202 19:25:08.086653 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 19:25:08 crc kubenswrapper[4792]: I1202 19:25:08.087341 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 19:25:38 crc kubenswrapper[4792]: I1202 19:25:38.081408 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 19:25:38 crc kubenswrapper[4792]: I1202 19:25:38.082095 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 19:25:38 crc kubenswrapper[4792]: I1202 19:25:38.082171 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" Dec 02 19:25:38 crc kubenswrapper[4792]: I1202 19:25:38.083203 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d10baa4abdd1b48b5e4fd474432bcd290c613ab0dd069024ee83a680d770775a"} pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 19:25:38 crc kubenswrapper[4792]: I1202 19:25:38.083254 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" 
podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" containerID="cri-o://d10baa4abdd1b48b5e4fd474432bcd290c613ab0dd069024ee83a680d770775a" gracePeriod=600 Dec 02 19:25:38 crc kubenswrapper[4792]: E1202 19:25:38.229097 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:25:38 crc kubenswrapper[4792]: I1202 19:25:38.770064 4792 generic.go:334] "Generic (PLEG): container finished" podID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerID="d10baa4abdd1b48b5e4fd474432bcd290c613ab0dd069024ee83a680d770775a" exitCode=0 Dec 02 19:25:38 crc kubenswrapper[4792]: I1202 19:25:38.770108 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" event={"ID":"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f","Type":"ContainerDied","Data":"d10baa4abdd1b48b5e4fd474432bcd290c613ab0dd069024ee83a680d770775a"} Dec 02 19:25:38 crc kubenswrapper[4792]: I1202 19:25:38.770140 4792 scope.go:117] "RemoveContainer" containerID="d20ab7eee014a921ea27d4d4918c14b48ebd9d68391e538f57ba8b85fb451a48" Dec 02 19:25:38 crc kubenswrapper[4792]: I1202 19:25:38.770570 4792 scope.go:117] "RemoveContainer" containerID="d10baa4abdd1b48b5e4fd474432bcd290c613ab0dd069024ee83a680d770775a" Dec 02 19:25:38 crc kubenswrapper[4792]: E1202 19:25:38.770880 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:25:52 crc kubenswrapper[4792]: I1202 19:25:52.540366 4792 scope.go:117] "RemoveContainer" containerID="d10baa4abdd1b48b5e4fd474432bcd290c613ab0dd069024ee83a680d770775a" Dec 02 19:25:52 crc kubenswrapper[4792]: E1202 19:25:52.541343 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:26:03 crc kubenswrapper[4792]: I1202 19:26:03.539834 4792 scope.go:117] "RemoveContainer" containerID="d10baa4abdd1b48b5e4fd474432bcd290c613ab0dd069024ee83a680d770775a" Dec 02 19:26:03 crc kubenswrapper[4792]: E1202 19:26:03.540660 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:26:14 crc kubenswrapper[4792]: I1202 19:26:14.540009 4792 scope.go:117] 
"RemoveContainer" containerID="d10baa4abdd1b48b5e4fd474432bcd290c613ab0dd069024ee83a680d770775a" Dec 02 19:26:14 crc kubenswrapper[4792]: E1202 19:26:14.541204 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:26:28 crc kubenswrapper[4792]: I1202 19:26:28.539573 4792 scope.go:117] "RemoveContainer" containerID="d10baa4abdd1b48b5e4fd474432bcd290c613ab0dd069024ee83a680d770775a" Dec 02 19:26:28 crc kubenswrapper[4792]: E1202 19:26:28.540296 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:26:43 crc kubenswrapper[4792]: I1202 19:26:43.540226 4792 scope.go:117] "RemoveContainer" containerID="d10baa4abdd1b48b5e4fd474432bcd290c613ab0dd069024ee83a680d770775a" Dec 02 19:26:43 crc kubenswrapper[4792]: E1202 19:26:43.541203 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:26:58 crc kubenswrapper[4792]: I1202 19:26:58.540813 4792 scope.go:117] "RemoveContainer" containerID="d10baa4abdd1b48b5e4fd474432bcd290c613ab0dd069024ee83a680d770775a" Dec 02 19:26:58 crc kubenswrapper[4792]: E1202 19:26:58.541950 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:27:09 crc kubenswrapper[4792]: I1202 19:27:09.554093 4792 scope.go:117] "RemoveContainer" containerID="d10baa4abdd1b48b5e4fd474432bcd290c613ab0dd069024ee83a680d770775a" Dec 02 19:27:09 crc kubenswrapper[4792]: E1202 19:27:09.554826 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:27:21 crc kubenswrapper[4792]: I1202 19:27:21.540044 4792 scope.go:117] "RemoveContainer" containerID="d10baa4abdd1b48b5e4fd474432bcd290c613ab0dd069024ee83a680d770775a" Dec 02 19:27:21 crc kubenswrapper[4792]: E1202 19:27:21.541392 4792 pod_workers.go:1301] "Error 
syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:27:26 crc kubenswrapper[4792]: I1202 19:27:26.210434 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-v8hs5"] Dec 02 19:27:26 crc kubenswrapper[4792]: E1202 19:27:26.211401 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="462f8c01-a31a-4ee7-bf5b-c9ddd66be850" containerName="registry-server" Dec 02 19:27:26 crc kubenswrapper[4792]: I1202 19:27:26.211415 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="462f8c01-a31a-4ee7-bf5b-c9ddd66be850" containerName="registry-server" Dec 02 19:27:26 crc kubenswrapper[4792]: E1202 19:27:26.211432 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="462f8c01-a31a-4ee7-bf5b-c9ddd66be850" containerName="extract-utilities" Dec 02 19:27:26 crc kubenswrapper[4792]: I1202 19:27:26.211438 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="462f8c01-a31a-4ee7-bf5b-c9ddd66be850" containerName="extract-utilities" Dec 02 19:27:26 crc kubenswrapper[4792]: E1202 19:27:26.211473 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="462f8c01-a31a-4ee7-bf5b-c9ddd66be850" containerName="extract-content" Dec 02 19:27:26 crc kubenswrapper[4792]: I1202 19:27:26.211484 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="462f8c01-a31a-4ee7-bf5b-c9ddd66be850" containerName="extract-content" Dec 02 19:27:26 crc kubenswrapper[4792]: E1202 19:27:26.211498 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="337406d5-c0a0-4331-82ee-ba4ad14e55d1" containerName="registry-server" Dec 02 19:27:26 crc kubenswrapper[4792]: I1202 19:27:26.211506 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="337406d5-c0a0-4331-82ee-ba4ad14e55d1" containerName="registry-server" Dec 02 19:27:26 crc kubenswrapper[4792]: E1202 19:27:26.211546 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="337406d5-c0a0-4331-82ee-ba4ad14e55d1" containerName="extract-utilities" Dec 02 19:27:26 crc kubenswrapper[4792]: I1202 19:27:26.211553 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="337406d5-c0a0-4331-82ee-ba4ad14e55d1" containerName="extract-utilities" Dec 02 19:27:26 crc kubenswrapper[4792]: E1202 19:27:26.211562 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="337406d5-c0a0-4331-82ee-ba4ad14e55d1" containerName="extract-content" Dec 02 19:27:26 crc kubenswrapper[4792]: I1202 19:27:26.211567 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="337406d5-c0a0-4331-82ee-ba4ad14e55d1" containerName="extract-content" Dec 02 19:27:26 crc kubenswrapper[4792]: I1202 19:27:26.211773 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="462f8c01-a31a-4ee7-bf5b-c9ddd66be850" containerName="registry-server" Dec 02 19:27:26 crc kubenswrapper[4792]: I1202 19:27:26.211794 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="337406d5-c0a0-4331-82ee-ba4ad14e55d1" containerName="registry-server" Dec 02 19:27:26 crc kubenswrapper[4792]: I1202 19:27:26.213216 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-v8hs5" Dec 02 19:27:26 crc kubenswrapper[4792]: I1202 19:27:26.235622 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-v8hs5"] Dec 02 19:27:26 crc kubenswrapper[4792]: I1202 19:27:26.347068 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bcgsw\" (UniqueName: \"kubernetes.io/projected/23e9790b-3973-4d26-889f-6d206c136b60-kube-api-access-bcgsw\") pod \"redhat-operators-v8hs5\" (UID: \"23e9790b-3973-4d26-889f-6d206c136b60\") " pod="openshift-marketplace/redhat-operators-v8hs5" Dec 02 19:27:26 crc kubenswrapper[4792]: I1202 19:27:26.347211 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23e9790b-3973-4d26-889f-6d206c136b60-catalog-content\") pod \"redhat-operators-v8hs5\" (UID: \"23e9790b-3973-4d26-889f-6d206c136b60\") " pod="openshift-marketplace/redhat-operators-v8hs5" Dec 02 19:27:26 crc kubenswrapper[4792]: I1202 19:27:26.347248 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23e9790b-3973-4d26-889f-6d206c136b60-utilities\") pod \"redhat-operators-v8hs5\" (UID: \"23e9790b-3973-4d26-889f-6d206c136b60\") " pod="openshift-marketplace/redhat-operators-v8hs5" Dec 02 19:27:26 crc kubenswrapper[4792]: I1202 19:27:26.449583 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bcgsw\" (UniqueName: \"kubernetes.io/projected/23e9790b-3973-4d26-889f-6d206c136b60-kube-api-access-bcgsw\") pod \"redhat-operators-v8hs5\" (UID: \"23e9790b-3973-4d26-889f-6d206c136b60\") " pod="openshift-marketplace/redhat-operators-v8hs5" Dec 02 19:27:26 crc kubenswrapper[4792]: I1202 19:27:26.449676 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23e9790b-3973-4d26-889f-6d206c136b60-catalog-content\") pod \"redhat-operators-v8hs5\" (UID: \"23e9790b-3973-4d26-889f-6d206c136b60\") " pod="openshift-marketplace/redhat-operators-v8hs5" Dec 02 19:27:26 crc kubenswrapper[4792]: I1202 19:27:26.449704 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23e9790b-3973-4d26-889f-6d206c136b60-utilities\") pod \"redhat-operators-v8hs5\" (UID: \"23e9790b-3973-4d26-889f-6d206c136b60\") " pod="openshift-marketplace/redhat-operators-v8hs5" Dec 02 19:27:26 crc kubenswrapper[4792]: I1202 19:27:26.450191 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23e9790b-3973-4d26-889f-6d206c136b60-utilities\") pod \"redhat-operators-v8hs5\" (UID: \"23e9790b-3973-4d26-889f-6d206c136b60\") " pod="openshift-marketplace/redhat-operators-v8hs5" Dec 02 19:27:26 crc kubenswrapper[4792]: I1202 19:27:26.450631 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23e9790b-3973-4d26-889f-6d206c136b60-catalog-content\") pod \"redhat-operators-v8hs5\" (UID: \"23e9790b-3973-4d26-889f-6d206c136b60\") " pod="openshift-marketplace/redhat-operators-v8hs5" Dec 02 19:27:26 crc kubenswrapper[4792]: I1202 19:27:26.471470 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-bcgsw\" (UniqueName: \"kubernetes.io/projected/23e9790b-3973-4d26-889f-6d206c136b60-kube-api-access-bcgsw\") pod \"redhat-operators-v8hs5\" (UID: \"23e9790b-3973-4d26-889f-6d206c136b60\") " pod="openshift-marketplace/redhat-operators-v8hs5" Dec 02 19:27:26 crc kubenswrapper[4792]: I1202 19:27:26.570847 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-v8hs5" Dec 02 19:27:27 crc kubenswrapper[4792]: I1202 19:27:27.095732 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-v8hs5"] Dec 02 19:27:27 crc kubenswrapper[4792]: I1202 19:27:27.115635 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v8hs5" event={"ID":"23e9790b-3973-4d26-889f-6d206c136b60","Type":"ContainerStarted","Data":"5fd0f819bf1af50530e07536238733901fbe154ca07388eea867625db577887c"} Dec 02 19:27:28 crc kubenswrapper[4792]: I1202 19:27:28.126132 4792 generic.go:334] "Generic (PLEG): container finished" podID="23e9790b-3973-4d26-889f-6d206c136b60" containerID="d9a2aa8adc215839971c2e2b57006ff501871ee50881479d603c24ba68e02a17" exitCode=0 Dec 02 19:27:28 crc kubenswrapper[4792]: I1202 19:27:28.126199 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v8hs5" event={"ID":"23e9790b-3973-4d26-889f-6d206c136b60","Type":"ContainerDied","Data":"d9a2aa8adc215839971c2e2b57006ff501871ee50881479d603c24ba68e02a17"} Dec 02 19:27:28 crc kubenswrapper[4792]: I1202 19:27:28.129062 4792 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 19:27:29 crc kubenswrapper[4792]: I1202 19:27:29.138918 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v8hs5" event={"ID":"23e9790b-3973-4d26-889f-6d206c136b60","Type":"ContainerStarted","Data":"12b281cb57f8601fc2db16e2c508920c5b5889d6f9f3e9b93240e8a07a7cf659"} Dec 02 19:27:32 crc kubenswrapper[4792]: I1202 19:27:32.179354 4792 generic.go:334] "Generic (PLEG): container finished" podID="23e9790b-3973-4d26-889f-6d206c136b60" containerID="12b281cb57f8601fc2db16e2c508920c5b5889d6f9f3e9b93240e8a07a7cf659" exitCode=0 Dec 02 19:27:32 crc kubenswrapper[4792]: I1202 19:27:32.179467 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v8hs5" event={"ID":"23e9790b-3973-4d26-889f-6d206c136b60","Type":"ContainerDied","Data":"12b281cb57f8601fc2db16e2c508920c5b5889d6f9f3e9b93240e8a07a7cf659"} Dec 02 19:27:32 crc kubenswrapper[4792]: I1202 19:27:32.540783 4792 scope.go:117] "RemoveContainer" containerID="d10baa4abdd1b48b5e4fd474432bcd290c613ab0dd069024ee83a680d770775a" Dec 02 19:27:32 crc kubenswrapper[4792]: E1202 19:27:32.541153 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:27:33 crc kubenswrapper[4792]: I1202 19:27:33.194978 4792 generic.go:334] "Generic (PLEG): container finished" podID="6aeeaf00-b476-4d91-a807-92fb47391287" containerID="5861a211eae20f2fd0438be806b372d0e23da471e74a5fbee908f50792925310" exitCode=0 Dec 02 19:27:33 crc kubenswrapper[4792]: I1202 
19:27:33.195057 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4" event={"ID":"6aeeaf00-b476-4d91-a807-92fb47391287","Type":"ContainerDied","Data":"5861a211eae20f2fd0438be806b372d0e23da471e74a5fbee908f50792925310"} Dec 02 19:27:33 crc kubenswrapper[4792]: I1202 19:27:33.198905 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v8hs5" event={"ID":"23e9790b-3973-4d26-889f-6d206c136b60","Type":"ContainerStarted","Data":"ebe244586c6eb10de727540318867db63aa1d4e0f8bb2dfd16c10bfe6a001d62"} Dec 02 19:27:33 crc kubenswrapper[4792]: I1202 19:27:33.249630 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-v8hs5" podStartSLOduration=2.691142102 podStartE2EDuration="7.249600269s" podCreationTimestamp="2025-12-02 19:27:26 +0000 UTC" firstStartedPulling="2025-12-02 19:27:28.128852896 +0000 UTC m=+3078.901745214" lastFinishedPulling="2025-12-02 19:27:32.687311033 +0000 UTC m=+3083.460203381" observedRunningTime="2025-12-02 19:27:33.238091807 +0000 UTC m=+3084.010984175" watchObservedRunningTime="2025-12-02 19:27:33.249600269 +0000 UTC m=+3084.022492637" Dec 02 19:27:34 crc kubenswrapper[4792]: I1202 19:27:34.792626 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4" Dec 02 19:27:34 crc kubenswrapper[4792]: I1202 19:27:34.959095 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/6aeeaf00-b476-4d91-a807-92fb47391287-ceilometer-compute-config-data-1\") pod \"6aeeaf00-b476-4d91-a807-92fb47391287\" (UID: \"6aeeaf00-b476-4d91-a807-92fb47391287\") " Dec 02 19:27:34 crc kubenswrapper[4792]: I1202 19:27:34.959188 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/6aeeaf00-b476-4d91-a807-92fb47391287-ceilometer-compute-config-data-0\") pod \"6aeeaf00-b476-4d91-a807-92fb47391287\" (UID: \"6aeeaf00-b476-4d91-a807-92fb47391287\") " Dec 02 19:27:34 crc kubenswrapper[4792]: I1202 19:27:34.959223 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6aeeaf00-b476-4d91-a807-92fb47391287-inventory\") pod \"6aeeaf00-b476-4d91-a807-92fb47391287\" (UID: \"6aeeaf00-b476-4d91-a807-92fb47391287\") " Dec 02 19:27:34 crc kubenswrapper[4792]: I1202 19:27:34.959262 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6aeeaf00-b476-4d91-a807-92fb47391287-ssh-key\") pod \"6aeeaf00-b476-4d91-a807-92fb47391287\" (UID: \"6aeeaf00-b476-4d91-a807-92fb47391287\") " Dec 02 19:27:34 crc kubenswrapper[4792]: I1202 19:27:34.959380 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6aeeaf00-b476-4d91-a807-92fb47391287-telemetry-combined-ca-bundle\") pod \"6aeeaf00-b476-4d91-a807-92fb47391287\" (UID: \"6aeeaf00-b476-4d91-a807-92fb47391287\") " Dec 02 19:27:34 crc kubenswrapper[4792]: I1202 19:27:34.959459 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7zpxp\" (UniqueName: 
\"kubernetes.io/projected/6aeeaf00-b476-4d91-a807-92fb47391287-kube-api-access-7zpxp\") pod \"6aeeaf00-b476-4d91-a807-92fb47391287\" (UID: \"6aeeaf00-b476-4d91-a807-92fb47391287\") " Dec 02 19:27:34 crc kubenswrapper[4792]: I1202 19:27:34.959642 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/6aeeaf00-b476-4d91-a807-92fb47391287-ceilometer-compute-config-data-2\") pod \"6aeeaf00-b476-4d91-a807-92fb47391287\" (UID: \"6aeeaf00-b476-4d91-a807-92fb47391287\") " Dec 02 19:27:34 crc kubenswrapper[4792]: I1202 19:27:34.964946 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6aeeaf00-b476-4d91-a807-92fb47391287-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "6aeeaf00-b476-4d91-a807-92fb47391287" (UID: "6aeeaf00-b476-4d91-a807-92fb47391287"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:27:34 crc kubenswrapper[4792]: I1202 19:27:34.977863 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6aeeaf00-b476-4d91-a807-92fb47391287-kube-api-access-7zpxp" (OuterVolumeSpecName: "kube-api-access-7zpxp") pod "6aeeaf00-b476-4d91-a807-92fb47391287" (UID: "6aeeaf00-b476-4d91-a807-92fb47391287"). InnerVolumeSpecName "kube-api-access-7zpxp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:27:34 crc kubenswrapper[4792]: I1202 19:27:34.992647 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6aeeaf00-b476-4d91-a807-92fb47391287-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "6aeeaf00-b476-4d91-a807-92fb47391287" (UID: "6aeeaf00-b476-4d91-a807-92fb47391287"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:27:35 crc kubenswrapper[4792]: I1202 19:27:35.006410 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6aeeaf00-b476-4d91-a807-92fb47391287-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "6aeeaf00-b476-4d91-a807-92fb47391287" (UID: "6aeeaf00-b476-4d91-a807-92fb47391287"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:27:35 crc kubenswrapper[4792]: I1202 19:27:35.031160 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6aeeaf00-b476-4d91-a807-92fb47391287-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "6aeeaf00-b476-4d91-a807-92fb47391287" (UID: "6aeeaf00-b476-4d91-a807-92fb47391287"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:27:35 crc kubenswrapper[4792]: I1202 19:27:35.031203 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6aeeaf00-b476-4d91-a807-92fb47391287-inventory" (OuterVolumeSpecName: "inventory") pod "6aeeaf00-b476-4d91-a807-92fb47391287" (UID: "6aeeaf00-b476-4d91-a807-92fb47391287"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:27:35 crc kubenswrapper[4792]: I1202 19:27:35.042968 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6aeeaf00-b476-4d91-a807-92fb47391287-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "6aeeaf00-b476-4d91-a807-92fb47391287" (UID: "6aeeaf00-b476-4d91-a807-92fb47391287"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:27:35 crc kubenswrapper[4792]: I1202 19:27:35.063058 4792 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6aeeaf00-b476-4d91-a807-92fb47391287-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 19:27:35 crc kubenswrapper[4792]: I1202 19:27:35.063101 4792 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6aeeaf00-b476-4d91-a807-92fb47391287-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 19:27:35 crc kubenswrapper[4792]: I1202 19:27:35.063116 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7zpxp\" (UniqueName: \"kubernetes.io/projected/6aeeaf00-b476-4d91-a807-92fb47391287-kube-api-access-7zpxp\") on node \"crc\" DevicePath \"\"" Dec 02 19:27:35 crc kubenswrapper[4792]: I1202 19:27:35.063126 4792 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/6aeeaf00-b476-4d91-a807-92fb47391287-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Dec 02 19:27:35 crc kubenswrapper[4792]: I1202 19:27:35.063140 4792 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/6aeeaf00-b476-4d91-a807-92fb47391287-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Dec 02 19:27:35 crc kubenswrapper[4792]: I1202 19:27:35.063151 4792 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/6aeeaf00-b476-4d91-a807-92fb47391287-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Dec 02 19:27:35 crc kubenswrapper[4792]: I1202 19:27:35.063162 4792 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6aeeaf00-b476-4d91-a807-92fb47391287-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 19:27:35 crc kubenswrapper[4792]: I1202 19:27:35.227123 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4" event={"ID":"6aeeaf00-b476-4d91-a807-92fb47391287","Type":"ContainerDied","Data":"2745b3df3a16acff082c67f7bfd4f9973b0fc8c32d7d5d23f5adfa9cde305470"} Dec 02 19:27:35 crc kubenswrapper[4792]: I1202 19:27:35.227183 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2745b3df3a16acff082c67f7bfd4f9973b0fc8c32d7d5d23f5adfa9cde305470" Dec 02 19:27:35 crc kubenswrapper[4792]: I1202 19:27:35.227238 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4" Dec 02 19:27:36 crc kubenswrapper[4792]: I1202 19:27:36.572162 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-v8hs5" Dec 02 19:27:36 crc kubenswrapper[4792]: I1202 19:27:36.572458 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-v8hs5" Dec 02 19:27:37 crc kubenswrapper[4792]: I1202 19:27:37.635083 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-v8hs5" podUID="23e9790b-3973-4d26-889f-6d206c136b60" containerName="registry-server" probeResult="failure" output=< Dec 02 19:27:37 crc kubenswrapper[4792]: timeout: failed to connect service ":50051" within 1s Dec 02 19:27:37 crc kubenswrapper[4792]: > Dec 02 19:27:46 crc kubenswrapper[4792]: I1202 19:27:46.540254 4792 scope.go:117] "RemoveContainer" containerID="d10baa4abdd1b48b5e4fd474432bcd290c613ab0dd069024ee83a680d770775a" Dec 02 19:27:46 crc kubenswrapper[4792]: E1202 19:27:46.541447 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:27:46 crc kubenswrapper[4792]: I1202 19:27:46.651994 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-v8hs5" Dec 02 19:27:46 crc kubenswrapper[4792]: I1202 19:27:46.730151 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-v8hs5" Dec 02 19:27:49 crc kubenswrapper[4792]: I1202 19:27:49.244230 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-v8hs5"] Dec 02 19:27:49 crc kubenswrapper[4792]: I1202 19:27:49.244796 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-v8hs5" podUID="23e9790b-3973-4d26-889f-6d206c136b60" containerName="registry-server" containerID="cri-o://ebe244586c6eb10de727540318867db63aa1d4e0f8bb2dfd16c10bfe6a001d62" gracePeriod=2 Dec 02 19:27:49 crc kubenswrapper[4792]: I1202 19:27:49.403667 4792 generic.go:334] "Generic (PLEG): container finished" podID="23e9790b-3973-4d26-889f-6d206c136b60" containerID="ebe244586c6eb10de727540318867db63aa1d4e0f8bb2dfd16c10bfe6a001d62" exitCode=0 Dec 02 19:27:49 crc kubenswrapper[4792]: I1202 19:27:49.403769 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v8hs5" event={"ID":"23e9790b-3973-4d26-889f-6d206c136b60","Type":"ContainerDied","Data":"ebe244586c6eb10de727540318867db63aa1d4e0f8bb2dfd16c10bfe6a001d62"} Dec 02 19:27:49 crc kubenswrapper[4792]: I1202 19:27:49.838732 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-v8hs5" Dec 02 19:27:49 crc kubenswrapper[4792]: I1202 19:27:49.906666 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23e9790b-3973-4d26-889f-6d206c136b60-utilities\") pod \"23e9790b-3973-4d26-889f-6d206c136b60\" (UID: \"23e9790b-3973-4d26-889f-6d206c136b60\") " Dec 02 19:27:49 crc kubenswrapper[4792]: I1202 19:27:49.906826 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23e9790b-3973-4d26-889f-6d206c136b60-catalog-content\") pod \"23e9790b-3973-4d26-889f-6d206c136b60\" (UID: \"23e9790b-3973-4d26-889f-6d206c136b60\") " Dec 02 19:27:49 crc kubenswrapper[4792]: I1202 19:27:49.906851 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bcgsw\" (UniqueName: \"kubernetes.io/projected/23e9790b-3973-4d26-889f-6d206c136b60-kube-api-access-bcgsw\") pod \"23e9790b-3973-4d26-889f-6d206c136b60\" (UID: \"23e9790b-3973-4d26-889f-6d206c136b60\") " Dec 02 19:27:49 crc kubenswrapper[4792]: I1202 19:27:49.908440 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23e9790b-3973-4d26-889f-6d206c136b60-utilities" (OuterVolumeSpecName: "utilities") pod "23e9790b-3973-4d26-889f-6d206c136b60" (UID: "23e9790b-3973-4d26-889f-6d206c136b60"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:27:49 crc kubenswrapper[4792]: I1202 19:27:49.916364 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23e9790b-3973-4d26-889f-6d206c136b60-kube-api-access-bcgsw" (OuterVolumeSpecName: "kube-api-access-bcgsw") pod "23e9790b-3973-4d26-889f-6d206c136b60" (UID: "23e9790b-3973-4d26-889f-6d206c136b60"). InnerVolumeSpecName "kube-api-access-bcgsw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:27:50 crc kubenswrapper[4792]: I1202 19:27:50.009712 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bcgsw\" (UniqueName: \"kubernetes.io/projected/23e9790b-3973-4d26-889f-6d206c136b60-kube-api-access-bcgsw\") on node \"crc\" DevicePath \"\"" Dec 02 19:27:50 crc kubenswrapper[4792]: I1202 19:27:50.009743 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23e9790b-3973-4d26-889f-6d206c136b60-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 19:27:50 crc kubenswrapper[4792]: I1202 19:27:50.054014 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23e9790b-3973-4d26-889f-6d206c136b60-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "23e9790b-3973-4d26-889f-6d206c136b60" (UID: "23e9790b-3973-4d26-889f-6d206c136b60"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:27:50 crc kubenswrapper[4792]: I1202 19:27:50.113141 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23e9790b-3973-4d26-889f-6d206c136b60-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 19:27:50 crc kubenswrapper[4792]: I1202 19:27:50.423765 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v8hs5" event={"ID":"23e9790b-3973-4d26-889f-6d206c136b60","Type":"ContainerDied","Data":"5fd0f819bf1af50530e07536238733901fbe154ca07388eea867625db577887c"} Dec 02 19:27:50 crc kubenswrapper[4792]: I1202 19:27:50.423862 4792 scope.go:117] "RemoveContainer" containerID="ebe244586c6eb10de727540318867db63aa1d4e0f8bb2dfd16c10bfe6a001d62" Dec 02 19:27:50 crc kubenswrapper[4792]: I1202 19:27:50.423869 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-v8hs5" Dec 02 19:27:50 crc kubenswrapper[4792]: I1202 19:27:50.463660 4792 scope.go:117] "RemoveContainer" containerID="12b281cb57f8601fc2db16e2c508920c5b5889d6f9f3e9b93240e8a07a7cf659" Dec 02 19:27:50 crc kubenswrapper[4792]: I1202 19:27:50.485546 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-v8hs5"] Dec 02 19:27:50 crc kubenswrapper[4792]: I1202 19:27:50.503196 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-v8hs5"] Dec 02 19:27:50 crc kubenswrapper[4792]: I1202 19:27:50.510192 4792 scope.go:117] "RemoveContainer" containerID="d9a2aa8adc215839971c2e2b57006ff501871ee50881479d603c24ba68e02a17" Dec 02 19:27:51 crc kubenswrapper[4792]: I1202 19:27:51.563718 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23e9790b-3973-4d26-889f-6d206c136b60" path="/var/lib/kubelet/pods/23e9790b-3973-4d26-889f-6d206c136b60/volumes" Dec 02 19:27:57 crc kubenswrapper[4792]: I1202 19:27:57.540550 4792 scope.go:117] "RemoveContainer" containerID="d10baa4abdd1b48b5e4fd474432bcd290c613ab0dd069024ee83a680d770775a" Dec 02 19:27:57 crc kubenswrapper[4792]: E1202 19:27:57.541411 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:28:11 crc kubenswrapper[4792]: I1202 19:28:11.540050 4792 scope.go:117] "RemoveContainer" containerID="d10baa4abdd1b48b5e4fd474432bcd290c613ab0dd069024ee83a680d770775a" Dec 02 19:28:11 crc kubenswrapper[4792]: E1202 19:28:11.540973 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:28:25 crc kubenswrapper[4792]: I1202 19:28:25.540258 4792 scope.go:117] "RemoveContainer" containerID="d10baa4abdd1b48b5e4fd474432bcd290c613ab0dd069024ee83a680d770775a" Dec 02 19:28:25 crc kubenswrapper[4792]: E1202 19:28:25.541645 
4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:28:40 crc kubenswrapper[4792]: I1202 19:28:40.539761 4792 scope.go:117] "RemoveContainer" containerID="d10baa4abdd1b48b5e4fd474432bcd290c613ab0dd069024ee83a680d770775a" Dec 02 19:28:40 crc kubenswrapper[4792]: E1202 19:28:40.540532 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:28:54 crc kubenswrapper[4792]: I1202 19:28:54.496469 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Dec 02 19:28:54 crc kubenswrapper[4792]: E1202 19:28:54.497905 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23e9790b-3973-4d26-889f-6d206c136b60" containerName="extract-utilities" Dec 02 19:28:54 crc kubenswrapper[4792]: I1202 19:28:54.497931 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="23e9790b-3973-4d26-889f-6d206c136b60" containerName="extract-utilities" Dec 02 19:28:54 crc kubenswrapper[4792]: E1202 19:28:54.497982 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6aeeaf00-b476-4d91-a807-92fb47391287" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 02 19:28:54 crc kubenswrapper[4792]: I1202 19:28:54.497995 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="6aeeaf00-b476-4d91-a807-92fb47391287" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 02 19:28:54 crc kubenswrapper[4792]: E1202 19:28:54.498012 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23e9790b-3973-4d26-889f-6d206c136b60" containerName="extract-content" Dec 02 19:28:54 crc kubenswrapper[4792]: I1202 19:28:54.498024 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="23e9790b-3973-4d26-889f-6d206c136b60" containerName="extract-content" Dec 02 19:28:54 crc kubenswrapper[4792]: E1202 19:28:54.498076 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23e9790b-3973-4d26-889f-6d206c136b60" containerName="registry-server" Dec 02 19:28:54 crc kubenswrapper[4792]: I1202 19:28:54.498091 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="23e9790b-3973-4d26-889f-6d206c136b60" containerName="registry-server" Dec 02 19:28:54 crc kubenswrapper[4792]: I1202 19:28:54.498447 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="23e9790b-3973-4d26-889f-6d206c136b60" containerName="registry-server" Dec 02 19:28:54 crc kubenswrapper[4792]: I1202 19:28:54.498474 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="6aeeaf00-b476-4d91-a807-92fb47391287" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 02 19:28:54 crc kubenswrapper[4792]: I1202 19:28:54.499884 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 02 19:28:54 crc kubenswrapper[4792]: I1202 19:28:54.502842 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Dec 02 19:28:54 crc kubenswrapper[4792]: I1202 19:28:54.503112 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-5k6l8" Dec 02 19:28:54 crc kubenswrapper[4792]: I1202 19:28:54.503365 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Dec 02 19:28:54 crc kubenswrapper[4792]: I1202 19:28:54.504378 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 02 19:28:54 crc kubenswrapper[4792]: I1202 19:28:54.519101 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 02 19:28:54 crc kubenswrapper[4792]: I1202 19:28:54.661597 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/22c610ff-ae47-48ac-8fea-c3ab17f23106-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"22c610ff-ae47-48ac-8fea-c3ab17f23106\") " pod="openstack/tempest-tests-tempest" Dec 02 19:28:54 crc kubenswrapper[4792]: I1202 19:28:54.662059 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/22c610ff-ae47-48ac-8fea-c3ab17f23106-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"22c610ff-ae47-48ac-8fea-c3ab17f23106\") " pod="openstack/tempest-tests-tempest" Dec 02 19:28:54 crc kubenswrapper[4792]: I1202 19:28:54.662448 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"tempest-tests-tempest\" (UID: \"22c610ff-ae47-48ac-8fea-c3ab17f23106\") " pod="openstack/tempest-tests-tempest" Dec 02 19:28:54 crc kubenswrapper[4792]: I1202 19:28:54.662664 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j9lxr\" (UniqueName: \"kubernetes.io/projected/22c610ff-ae47-48ac-8fea-c3ab17f23106-kube-api-access-j9lxr\") pod \"tempest-tests-tempest\" (UID: \"22c610ff-ae47-48ac-8fea-c3ab17f23106\") " pod="openstack/tempest-tests-tempest" Dec 02 19:28:54 crc kubenswrapper[4792]: I1202 19:28:54.663340 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/22c610ff-ae47-48ac-8fea-c3ab17f23106-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"22c610ff-ae47-48ac-8fea-c3ab17f23106\") " pod="openstack/tempest-tests-tempest" Dec 02 19:28:54 crc kubenswrapper[4792]: I1202 19:28:54.663590 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/22c610ff-ae47-48ac-8fea-c3ab17f23106-config-data\") pod \"tempest-tests-tempest\" (UID: \"22c610ff-ae47-48ac-8fea-c3ab17f23106\") " pod="openstack/tempest-tests-tempest" Dec 02 19:28:54 crc kubenswrapper[4792]: I1202 19:28:54.663746 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: 
\"kubernetes.io/secret/22c610ff-ae47-48ac-8fea-c3ab17f23106-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"22c610ff-ae47-48ac-8fea-c3ab17f23106\") " pod="openstack/tempest-tests-tempest" Dec 02 19:28:54 crc kubenswrapper[4792]: I1202 19:28:54.664132 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/22c610ff-ae47-48ac-8fea-c3ab17f23106-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"22c610ff-ae47-48ac-8fea-c3ab17f23106\") " pod="openstack/tempest-tests-tempest" Dec 02 19:28:54 crc kubenswrapper[4792]: I1202 19:28:54.664336 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/22c610ff-ae47-48ac-8fea-c3ab17f23106-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"22c610ff-ae47-48ac-8fea-c3ab17f23106\") " pod="openstack/tempest-tests-tempest" Dec 02 19:28:54 crc kubenswrapper[4792]: I1202 19:28:54.767493 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/22c610ff-ae47-48ac-8fea-c3ab17f23106-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"22c610ff-ae47-48ac-8fea-c3ab17f23106\") " pod="openstack/tempest-tests-tempest" Dec 02 19:28:54 crc kubenswrapper[4792]: I1202 19:28:54.767817 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/22c610ff-ae47-48ac-8fea-c3ab17f23106-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"22c610ff-ae47-48ac-8fea-c3ab17f23106\") " pod="openstack/tempest-tests-tempest" Dec 02 19:28:54 crc kubenswrapper[4792]: I1202 19:28:54.767995 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/22c610ff-ae47-48ac-8fea-c3ab17f23106-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"22c610ff-ae47-48ac-8fea-c3ab17f23106\") " pod="openstack/tempest-tests-tempest" Dec 02 19:28:54 crc kubenswrapper[4792]: I1202 19:28:54.769007 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/22c610ff-ae47-48ac-8fea-c3ab17f23106-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"22c610ff-ae47-48ac-8fea-c3ab17f23106\") " pod="openstack/tempest-tests-tempest" Dec 02 19:28:54 crc kubenswrapper[4792]: I1202 19:28:54.769129 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"tempest-tests-tempest\" (UID: \"22c610ff-ae47-48ac-8fea-c3ab17f23106\") " pod="openstack/tempest-tests-tempest" Dec 02 19:28:54 crc kubenswrapper[4792]: I1202 19:28:54.769017 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/22c610ff-ae47-48ac-8fea-c3ab17f23106-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"22c610ff-ae47-48ac-8fea-c3ab17f23106\") " pod="openstack/tempest-tests-tempest" Dec 02 19:28:54 crc kubenswrapper[4792]: I1202 19:28:54.769216 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j9lxr\" (UniqueName: 
\"kubernetes.io/projected/22c610ff-ae47-48ac-8fea-c3ab17f23106-kube-api-access-j9lxr\") pod \"tempest-tests-tempest\" (UID: \"22c610ff-ae47-48ac-8fea-c3ab17f23106\") " pod="openstack/tempest-tests-tempest" Dec 02 19:28:54 crc kubenswrapper[4792]: I1202 19:28:54.769200 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/22c610ff-ae47-48ac-8fea-c3ab17f23106-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"22c610ff-ae47-48ac-8fea-c3ab17f23106\") " pod="openstack/tempest-tests-tempest" Dec 02 19:28:54 crc kubenswrapper[4792]: I1202 19:28:54.769461 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/22c610ff-ae47-48ac-8fea-c3ab17f23106-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"22c610ff-ae47-48ac-8fea-c3ab17f23106\") " pod="openstack/tempest-tests-tempest" Dec 02 19:28:54 crc kubenswrapper[4792]: I1202 19:28:54.769597 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/22c610ff-ae47-48ac-8fea-c3ab17f23106-config-data\") pod \"tempest-tests-tempest\" (UID: \"22c610ff-ae47-48ac-8fea-c3ab17f23106\") " pod="openstack/tempest-tests-tempest" Dec 02 19:28:54 crc kubenswrapper[4792]: I1202 19:28:54.769645 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/22c610ff-ae47-48ac-8fea-c3ab17f23106-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"22c610ff-ae47-48ac-8fea-c3ab17f23106\") " pod="openstack/tempest-tests-tempest" Dec 02 19:28:54 crc kubenswrapper[4792]: I1202 19:28:54.769830 4792 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"tempest-tests-tempest\" (UID: \"22c610ff-ae47-48ac-8fea-c3ab17f23106\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/tempest-tests-tempest" Dec 02 19:28:54 crc kubenswrapper[4792]: I1202 19:28:54.770293 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/22c610ff-ae47-48ac-8fea-c3ab17f23106-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"22c610ff-ae47-48ac-8fea-c3ab17f23106\") " pod="openstack/tempest-tests-tempest" Dec 02 19:28:54 crc kubenswrapper[4792]: I1202 19:28:54.771707 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/22c610ff-ae47-48ac-8fea-c3ab17f23106-config-data\") pod \"tempest-tests-tempest\" (UID: \"22c610ff-ae47-48ac-8fea-c3ab17f23106\") " pod="openstack/tempest-tests-tempest" Dec 02 19:28:54 crc kubenswrapper[4792]: I1202 19:28:54.776791 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/22c610ff-ae47-48ac-8fea-c3ab17f23106-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"22c610ff-ae47-48ac-8fea-c3ab17f23106\") " pod="openstack/tempest-tests-tempest" Dec 02 19:28:54 crc kubenswrapper[4792]: I1202 19:28:54.777804 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/22c610ff-ae47-48ac-8fea-c3ab17f23106-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: 
\"22c610ff-ae47-48ac-8fea-c3ab17f23106\") " pod="openstack/tempest-tests-tempest" Dec 02 19:28:54 crc kubenswrapper[4792]: I1202 19:28:54.778020 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/22c610ff-ae47-48ac-8fea-c3ab17f23106-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"22c610ff-ae47-48ac-8fea-c3ab17f23106\") " pod="openstack/tempest-tests-tempest" Dec 02 19:28:54 crc kubenswrapper[4792]: I1202 19:28:54.795274 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j9lxr\" (UniqueName: \"kubernetes.io/projected/22c610ff-ae47-48ac-8fea-c3ab17f23106-kube-api-access-j9lxr\") pod \"tempest-tests-tempest\" (UID: \"22c610ff-ae47-48ac-8fea-c3ab17f23106\") " pod="openstack/tempest-tests-tempest" Dec 02 19:28:54 crc kubenswrapper[4792]: I1202 19:28:54.809635 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"tempest-tests-tempest\" (UID: \"22c610ff-ae47-48ac-8fea-c3ab17f23106\") " pod="openstack/tempest-tests-tempest" Dec 02 19:28:54 crc kubenswrapper[4792]: I1202 19:28:54.831777 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 02 19:28:55 crc kubenswrapper[4792]: I1202 19:28:55.397329 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 02 19:28:55 crc kubenswrapper[4792]: I1202 19:28:55.541419 4792 scope.go:117] "RemoveContainer" containerID="d10baa4abdd1b48b5e4fd474432bcd290c613ab0dd069024ee83a680d770775a" Dec 02 19:28:55 crc kubenswrapper[4792]: E1202 19:28:55.541729 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:28:56 crc kubenswrapper[4792]: I1202 19:28:56.222380 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"22c610ff-ae47-48ac-8fea-c3ab17f23106","Type":"ContainerStarted","Data":"1eeb87c1b30573491fd8daf540f8d309fe2805ef10cfb33caf7ccc4d93009407"} Dec 02 19:29:08 crc kubenswrapper[4792]: I1202 19:29:08.540474 4792 scope.go:117] "RemoveContainer" containerID="d10baa4abdd1b48b5e4fd474432bcd290c613ab0dd069024ee83a680d770775a" Dec 02 19:29:08 crc kubenswrapper[4792]: E1202 19:29:08.541476 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:29:19 crc kubenswrapper[4792]: I1202 19:29:19.411367 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-ghxlh"] Dec 02 19:29:19 crc kubenswrapper[4792]: I1202 19:29:19.415203 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ghxlh" Dec 02 19:29:19 crc kubenswrapper[4792]: I1202 19:29:19.445462 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ghxlh"] Dec 02 19:29:19 crc kubenswrapper[4792]: I1202 19:29:19.562497 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1295ef51-62d2-4227-bd28-85b205f8a00a-catalog-content\") pod \"community-operators-ghxlh\" (UID: \"1295ef51-62d2-4227-bd28-85b205f8a00a\") " pod="openshift-marketplace/community-operators-ghxlh" Dec 02 19:29:19 crc kubenswrapper[4792]: I1202 19:29:19.562631 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jz6xc\" (UniqueName: \"kubernetes.io/projected/1295ef51-62d2-4227-bd28-85b205f8a00a-kube-api-access-jz6xc\") pod \"community-operators-ghxlh\" (UID: \"1295ef51-62d2-4227-bd28-85b205f8a00a\") " pod="openshift-marketplace/community-operators-ghxlh" Dec 02 19:29:19 crc kubenswrapper[4792]: I1202 19:29:19.562815 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1295ef51-62d2-4227-bd28-85b205f8a00a-utilities\") pod \"community-operators-ghxlh\" (UID: \"1295ef51-62d2-4227-bd28-85b205f8a00a\") " pod="openshift-marketplace/community-operators-ghxlh" Dec 02 19:29:19 crc kubenswrapper[4792]: I1202 19:29:19.665056 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jz6xc\" (UniqueName: \"kubernetes.io/projected/1295ef51-62d2-4227-bd28-85b205f8a00a-kube-api-access-jz6xc\") pod \"community-operators-ghxlh\" (UID: \"1295ef51-62d2-4227-bd28-85b205f8a00a\") " pod="openshift-marketplace/community-operators-ghxlh" Dec 02 19:29:19 crc kubenswrapper[4792]: I1202 19:29:19.666274 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1295ef51-62d2-4227-bd28-85b205f8a00a-utilities\") pod \"community-operators-ghxlh\" (UID: \"1295ef51-62d2-4227-bd28-85b205f8a00a\") " pod="openshift-marketplace/community-operators-ghxlh" Dec 02 19:29:19 crc kubenswrapper[4792]: I1202 19:29:19.666366 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1295ef51-62d2-4227-bd28-85b205f8a00a-catalog-content\") pod \"community-operators-ghxlh\" (UID: \"1295ef51-62d2-4227-bd28-85b205f8a00a\") " pod="openshift-marketplace/community-operators-ghxlh" Dec 02 19:29:19 crc kubenswrapper[4792]: I1202 19:29:19.666749 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1295ef51-62d2-4227-bd28-85b205f8a00a-utilities\") pod \"community-operators-ghxlh\" (UID: \"1295ef51-62d2-4227-bd28-85b205f8a00a\") " pod="openshift-marketplace/community-operators-ghxlh" Dec 02 19:29:19 crc kubenswrapper[4792]: I1202 19:29:19.666797 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1295ef51-62d2-4227-bd28-85b205f8a00a-catalog-content\") pod \"community-operators-ghxlh\" (UID: \"1295ef51-62d2-4227-bd28-85b205f8a00a\") " pod="openshift-marketplace/community-operators-ghxlh" Dec 02 19:29:19 crc kubenswrapper[4792]: I1202 19:29:19.707377 4792 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-jz6xc\" (UniqueName: \"kubernetes.io/projected/1295ef51-62d2-4227-bd28-85b205f8a00a-kube-api-access-jz6xc\") pod \"community-operators-ghxlh\" (UID: \"1295ef51-62d2-4227-bd28-85b205f8a00a\") " pod="openshift-marketplace/community-operators-ghxlh" Dec 02 19:29:19 crc kubenswrapper[4792]: I1202 19:29:19.744899 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ghxlh" Dec 02 19:29:22 crc kubenswrapper[4792]: I1202 19:29:22.539257 4792 scope.go:117] "RemoveContainer" containerID="d10baa4abdd1b48b5e4fd474432bcd290c613ab0dd069024ee83a680d770775a" Dec 02 19:29:22 crc kubenswrapper[4792]: E1202 19:29:22.539969 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:29:26 crc kubenswrapper[4792]: E1202 19:29:26.315676 4792 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified" Dec 02 19:29:26 crc kubenswrapper[4792]: E1202 19:29:26.316780 4792 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-j9lxr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,
ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(22c610ff-ae47-48ac-8fea-c3ab17f23106): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 19:29:26 crc kubenswrapper[4792]: E1202 19:29:26.318465 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" podUID="22c610ff-ae47-48ac-8fea-c3ab17f23106" Dec 02 19:29:26 crc kubenswrapper[4792]: I1202 19:29:26.792477 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ghxlh"] Dec 02 19:29:26 crc kubenswrapper[4792]: I1202 19:29:26.852029 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ghxlh" event={"ID":"1295ef51-62d2-4227-bd28-85b205f8a00a","Type":"ContainerStarted","Data":"75ce922008b91bff2603f07b83c31531a864e8be3e63063653abba0a351980ca"} Dec 02 19:29:26 crc kubenswrapper[4792]: E1202 19:29:26.855643 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest" podUID="22c610ff-ae47-48ac-8fea-c3ab17f23106" Dec 02 19:29:27 crc kubenswrapper[4792]: I1202 19:29:27.869341 4792 generic.go:334] "Generic (PLEG): container finished" podID="1295ef51-62d2-4227-bd28-85b205f8a00a" containerID="d92c1069aad691ec1f109b2d5c5e4933443f805d646afc4da6cf9772a13d359a" exitCode=0 Dec 02 19:29:27 crc kubenswrapper[4792]: I1202 19:29:27.869476 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ghxlh" event={"ID":"1295ef51-62d2-4227-bd28-85b205f8a00a","Type":"ContainerDied","Data":"d92c1069aad691ec1f109b2d5c5e4933443f805d646afc4da6cf9772a13d359a"} Dec 02 19:29:29 crc kubenswrapper[4792]: I1202 19:29:29.891354 4792 generic.go:334] "Generic (PLEG): container finished" podID="1295ef51-62d2-4227-bd28-85b205f8a00a" containerID="bea6f959a12fbd49c64f9cc3b168de5358923315385d84bb2cc72cf74c2a2d00" exitCode=0 Dec 02 19:29:29 crc kubenswrapper[4792]: I1202 19:29:29.891395 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ghxlh" 
event={"ID":"1295ef51-62d2-4227-bd28-85b205f8a00a","Type":"ContainerDied","Data":"bea6f959a12fbd49c64f9cc3b168de5358923315385d84bb2cc72cf74c2a2d00"} Dec 02 19:29:31 crc kubenswrapper[4792]: I1202 19:29:31.927244 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ghxlh" event={"ID":"1295ef51-62d2-4227-bd28-85b205f8a00a","Type":"ContainerStarted","Data":"714df882990bcb27e45922929568f8863c2ded2bc8936202dabd11579197f7f5"} Dec 02 19:29:31 crc kubenswrapper[4792]: I1202 19:29:31.963613 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-ghxlh" podStartSLOduration=10.221405782 podStartE2EDuration="12.963594924s" podCreationTimestamp="2025-12-02 19:29:19 +0000 UTC" firstStartedPulling="2025-12-02 19:29:27.872124484 +0000 UTC m=+3198.645016812" lastFinishedPulling="2025-12-02 19:29:30.614313596 +0000 UTC m=+3201.387205954" observedRunningTime="2025-12-02 19:29:31.954650799 +0000 UTC m=+3202.727543137" watchObservedRunningTime="2025-12-02 19:29:31.963594924 +0000 UTC m=+3202.736487252" Dec 02 19:29:35 crc kubenswrapper[4792]: I1202 19:29:35.539991 4792 scope.go:117] "RemoveContainer" containerID="d10baa4abdd1b48b5e4fd474432bcd290c613ab0dd069024ee83a680d770775a" Dec 02 19:29:35 crc kubenswrapper[4792]: E1202 19:29:35.542774 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:29:39 crc kubenswrapper[4792]: I1202 19:29:39.746153 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-ghxlh" Dec 02 19:29:39 crc kubenswrapper[4792]: I1202 19:29:39.746880 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-ghxlh" Dec 02 19:29:39 crc kubenswrapper[4792]: I1202 19:29:39.831017 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-ghxlh" Dec 02 19:29:40 crc kubenswrapper[4792]: I1202 19:29:40.097383 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-ghxlh" Dec 02 19:29:40 crc kubenswrapper[4792]: I1202 19:29:40.164593 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ghxlh"] Dec 02 19:29:41 crc kubenswrapper[4792]: I1202 19:29:41.245559 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 02 19:29:42 crc kubenswrapper[4792]: I1202 19:29:42.050639 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-ghxlh" podUID="1295ef51-62d2-4227-bd28-85b205f8a00a" containerName="registry-server" containerID="cri-o://714df882990bcb27e45922929568f8863c2ded2bc8936202dabd11579197f7f5" gracePeriod=2 Dec 02 19:29:42 crc kubenswrapper[4792]: I1202 19:29:42.549788 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ghxlh" Dec 02 19:29:42 crc kubenswrapper[4792]: I1202 19:29:42.710301 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jz6xc\" (UniqueName: \"kubernetes.io/projected/1295ef51-62d2-4227-bd28-85b205f8a00a-kube-api-access-jz6xc\") pod \"1295ef51-62d2-4227-bd28-85b205f8a00a\" (UID: \"1295ef51-62d2-4227-bd28-85b205f8a00a\") " Dec 02 19:29:42 crc kubenswrapper[4792]: I1202 19:29:42.710364 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1295ef51-62d2-4227-bd28-85b205f8a00a-catalog-content\") pod \"1295ef51-62d2-4227-bd28-85b205f8a00a\" (UID: \"1295ef51-62d2-4227-bd28-85b205f8a00a\") " Dec 02 19:29:42 crc kubenswrapper[4792]: I1202 19:29:42.710408 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1295ef51-62d2-4227-bd28-85b205f8a00a-utilities\") pod \"1295ef51-62d2-4227-bd28-85b205f8a00a\" (UID: \"1295ef51-62d2-4227-bd28-85b205f8a00a\") " Dec 02 19:29:42 crc kubenswrapper[4792]: I1202 19:29:42.714201 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1295ef51-62d2-4227-bd28-85b205f8a00a-utilities" (OuterVolumeSpecName: "utilities") pod "1295ef51-62d2-4227-bd28-85b205f8a00a" (UID: "1295ef51-62d2-4227-bd28-85b205f8a00a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:29:42 crc kubenswrapper[4792]: I1202 19:29:42.718320 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1295ef51-62d2-4227-bd28-85b205f8a00a-kube-api-access-jz6xc" (OuterVolumeSpecName: "kube-api-access-jz6xc") pod "1295ef51-62d2-4227-bd28-85b205f8a00a" (UID: "1295ef51-62d2-4227-bd28-85b205f8a00a"). InnerVolumeSpecName "kube-api-access-jz6xc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:29:42 crc kubenswrapper[4792]: I1202 19:29:42.761555 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1295ef51-62d2-4227-bd28-85b205f8a00a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1295ef51-62d2-4227-bd28-85b205f8a00a" (UID: "1295ef51-62d2-4227-bd28-85b205f8a00a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:29:42 crc kubenswrapper[4792]: I1202 19:29:42.813623 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jz6xc\" (UniqueName: \"kubernetes.io/projected/1295ef51-62d2-4227-bd28-85b205f8a00a-kube-api-access-jz6xc\") on node \"crc\" DevicePath \"\"" Dec 02 19:29:42 crc kubenswrapper[4792]: I1202 19:29:42.813651 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1295ef51-62d2-4227-bd28-85b205f8a00a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 19:29:42 crc kubenswrapper[4792]: I1202 19:29:42.813662 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1295ef51-62d2-4227-bd28-85b205f8a00a-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 19:29:43 crc kubenswrapper[4792]: I1202 19:29:43.064335 4792 generic.go:334] "Generic (PLEG): container finished" podID="1295ef51-62d2-4227-bd28-85b205f8a00a" containerID="714df882990bcb27e45922929568f8863c2ded2bc8936202dabd11579197f7f5" exitCode=0 Dec 02 19:29:43 crc kubenswrapper[4792]: I1202 19:29:43.064437 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ghxlh" event={"ID":"1295ef51-62d2-4227-bd28-85b205f8a00a","Type":"ContainerDied","Data":"714df882990bcb27e45922929568f8863c2ded2bc8936202dabd11579197f7f5"} Dec 02 19:29:43 crc kubenswrapper[4792]: I1202 19:29:43.064478 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ghxlh" event={"ID":"1295ef51-62d2-4227-bd28-85b205f8a00a","Type":"ContainerDied","Data":"75ce922008b91bff2603f07b83c31531a864e8be3e63063653abba0a351980ca"} Dec 02 19:29:43 crc kubenswrapper[4792]: I1202 19:29:43.064506 4792 scope.go:117] "RemoveContainer" containerID="714df882990bcb27e45922929568f8863c2ded2bc8936202dabd11579197f7f5" Dec 02 19:29:43 crc kubenswrapper[4792]: I1202 19:29:43.064728 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ghxlh" Dec 02 19:29:43 crc kubenswrapper[4792]: I1202 19:29:43.081440 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"22c610ff-ae47-48ac-8fea-c3ab17f23106","Type":"ContainerStarted","Data":"abf98db9b5e0eba946d05b230d4940f41b111de60f19b839e7f488296d6d94b9"} Dec 02 19:29:43 crc kubenswrapper[4792]: I1202 19:29:43.099348 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=4.245898365 podStartE2EDuration="50.099321231s" podCreationTimestamp="2025-12-02 19:28:53 +0000 UTC" firstStartedPulling="2025-12-02 19:28:55.389077636 +0000 UTC m=+3166.161969974" lastFinishedPulling="2025-12-02 19:29:41.242500492 +0000 UTC m=+3212.015392840" observedRunningTime="2025-12-02 19:29:43.095915862 +0000 UTC m=+3213.868808180" watchObservedRunningTime="2025-12-02 19:29:43.099321231 +0000 UTC m=+3213.872213569" Dec 02 19:29:43 crc kubenswrapper[4792]: I1202 19:29:43.135471 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ghxlh"] Dec 02 19:29:43 crc kubenswrapper[4792]: I1202 19:29:43.145884 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-ghxlh"] Dec 02 19:29:43 crc kubenswrapper[4792]: I1202 19:29:43.151331 4792 scope.go:117] "RemoveContainer" containerID="bea6f959a12fbd49c64f9cc3b168de5358923315385d84bb2cc72cf74c2a2d00" Dec 02 19:29:43 crc kubenswrapper[4792]: I1202 19:29:43.171667 4792 scope.go:117] "RemoveContainer" containerID="d92c1069aad691ec1f109b2d5c5e4933443f805d646afc4da6cf9772a13d359a" Dec 02 19:29:43 crc kubenswrapper[4792]: I1202 19:29:43.230471 4792 scope.go:117] "RemoveContainer" containerID="714df882990bcb27e45922929568f8863c2ded2bc8936202dabd11579197f7f5" Dec 02 19:29:43 crc kubenswrapper[4792]: E1202 19:29:43.230966 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"714df882990bcb27e45922929568f8863c2ded2bc8936202dabd11579197f7f5\": container with ID starting with 714df882990bcb27e45922929568f8863c2ded2bc8936202dabd11579197f7f5 not found: ID does not exist" containerID="714df882990bcb27e45922929568f8863c2ded2bc8936202dabd11579197f7f5" Dec 02 19:29:43 crc kubenswrapper[4792]: I1202 19:29:43.231049 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"714df882990bcb27e45922929568f8863c2ded2bc8936202dabd11579197f7f5"} err="failed to get container status \"714df882990bcb27e45922929568f8863c2ded2bc8936202dabd11579197f7f5\": rpc error: code = NotFound desc = could not find container \"714df882990bcb27e45922929568f8863c2ded2bc8936202dabd11579197f7f5\": container with ID starting with 714df882990bcb27e45922929568f8863c2ded2bc8936202dabd11579197f7f5 not found: ID does not exist" Dec 02 19:29:43 crc kubenswrapper[4792]: I1202 19:29:43.231072 4792 scope.go:117] "RemoveContainer" containerID="bea6f959a12fbd49c64f9cc3b168de5358923315385d84bb2cc72cf74c2a2d00" Dec 02 19:29:43 crc kubenswrapper[4792]: E1202 19:29:43.231438 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bea6f959a12fbd49c64f9cc3b168de5358923315385d84bb2cc72cf74c2a2d00\": container with ID starting with bea6f959a12fbd49c64f9cc3b168de5358923315385d84bb2cc72cf74c2a2d00 not found: ID does not exist" 
containerID="bea6f959a12fbd49c64f9cc3b168de5358923315385d84bb2cc72cf74c2a2d00" Dec 02 19:29:43 crc kubenswrapper[4792]: I1202 19:29:43.231468 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bea6f959a12fbd49c64f9cc3b168de5358923315385d84bb2cc72cf74c2a2d00"} err="failed to get container status \"bea6f959a12fbd49c64f9cc3b168de5358923315385d84bb2cc72cf74c2a2d00\": rpc error: code = NotFound desc = could not find container \"bea6f959a12fbd49c64f9cc3b168de5358923315385d84bb2cc72cf74c2a2d00\": container with ID starting with bea6f959a12fbd49c64f9cc3b168de5358923315385d84bb2cc72cf74c2a2d00 not found: ID does not exist" Dec 02 19:29:43 crc kubenswrapper[4792]: I1202 19:29:43.231488 4792 scope.go:117] "RemoveContainer" containerID="d92c1069aad691ec1f109b2d5c5e4933443f805d646afc4da6cf9772a13d359a" Dec 02 19:29:43 crc kubenswrapper[4792]: E1202 19:29:43.232822 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d92c1069aad691ec1f109b2d5c5e4933443f805d646afc4da6cf9772a13d359a\": container with ID starting with d92c1069aad691ec1f109b2d5c5e4933443f805d646afc4da6cf9772a13d359a not found: ID does not exist" containerID="d92c1069aad691ec1f109b2d5c5e4933443f805d646afc4da6cf9772a13d359a" Dec 02 19:29:43 crc kubenswrapper[4792]: I1202 19:29:43.232868 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d92c1069aad691ec1f109b2d5c5e4933443f805d646afc4da6cf9772a13d359a"} err="failed to get container status \"d92c1069aad691ec1f109b2d5c5e4933443f805d646afc4da6cf9772a13d359a\": rpc error: code = NotFound desc = could not find container \"d92c1069aad691ec1f109b2d5c5e4933443f805d646afc4da6cf9772a13d359a\": container with ID starting with d92c1069aad691ec1f109b2d5c5e4933443f805d646afc4da6cf9772a13d359a not found: ID does not exist" Dec 02 19:29:43 crc kubenswrapper[4792]: I1202 19:29:43.558425 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1295ef51-62d2-4227-bd28-85b205f8a00a" path="/var/lib/kubelet/pods/1295ef51-62d2-4227-bd28-85b205f8a00a/volumes" Dec 02 19:29:49 crc kubenswrapper[4792]: I1202 19:29:49.557321 4792 scope.go:117] "RemoveContainer" containerID="d10baa4abdd1b48b5e4fd474432bcd290c613ab0dd069024ee83a680d770775a" Dec 02 19:29:49 crc kubenswrapper[4792]: E1202 19:29:49.558025 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:30:00 crc kubenswrapper[4792]: I1202 19:30:00.195569 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411730-jhh9k"] Dec 02 19:30:00 crc kubenswrapper[4792]: E1202 19:30:00.196720 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1295ef51-62d2-4227-bd28-85b205f8a00a" containerName="extract-utilities" Dec 02 19:30:00 crc kubenswrapper[4792]: I1202 19:30:00.196740 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="1295ef51-62d2-4227-bd28-85b205f8a00a" containerName="extract-utilities" Dec 02 19:30:00 crc kubenswrapper[4792]: E1202 19:30:00.196778 4792 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="1295ef51-62d2-4227-bd28-85b205f8a00a" containerName="extract-content" Dec 02 19:30:00 crc kubenswrapper[4792]: I1202 19:30:00.196786 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="1295ef51-62d2-4227-bd28-85b205f8a00a" containerName="extract-content" Dec 02 19:30:00 crc kubenswrapper[4792]: E1202 19:30:00.196801 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1295ef51-62d2-4227-bd28-85b205f8a00a" containerName="registry-server" Dec 02 19:30:00 crc kubenswrapper[4792]: I1202 19:30:00.196810 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="1295ef51-62d2-4227-bd28-85b205f8a00a" containerName="registry-server" Dec 02 19:30:00 crc kubenswrapper[4792]: I1202 19:30:00.197105 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="1295ef51-62d2-4227-bd28-85b205f8a00a" containerName="registry-server" Dec 02 19:30:00 crc kubenswrapper[4792]: I1202 19:30:00.198197 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411730-jhh9k" Dec 02 19:30:00 crc kubenswrapper[4792]: I1202 19:30:00.213281 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 02 19:30:00 crc kubenswrapper[4792]: I1202 19:30:00.213711 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 02 19:30:00 crc kubenswrapper[4792]: I1202 19:30:00.226766 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411730-jhh9k"] Dec 02 19:30:00 crc kubenswrapper[4792]: I1202 19:30:00.324482 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6j57m\" (UniqueName: \"kubernetes.io/projected/11e6e9b8-0fb5-488a-8652-963b3c002f35-kube-api-access-6j57m\") pod \"collect-profiles-29411730-jhh9k\" (UID: \"11e6e9b8-0fb5-488a-8652-963b3c002f35\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411730-jhh9k" Dec 02 19:30:00 crc kubenswrapper[4792]: I1202 19:30:00.324823 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/11e6e9b8-0fb5-488a-8652-963b3c002f35-config-volume\") pod \"collect-profiles-29411730-jhh9k\" (UID: \"11e6e9b8-0fb5-488a-8652-963b3c002f35\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411730-jhh9k" Dec 02 19:30:00 crc kubenswrapper[4792]: I1202 19:30:00.324991 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/11e6e9b8-0fb5-488a-8652-963b3c002f35-secret-volume\") pod \"collect-profiles-29411730-jhh9k\" (UID: \"11e6e9b8-0fb5-488a-8652-963b3c002f35\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411730-jhh9k" Dec 02 19:30:00 crc kubenswrapper[4792]: I1202 19:30:00.427103 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/11e6e9b8-0fb5-488a-8652-963b3c002f35-secret-volume\") pod \"collect-profiles-29411730-jhh9k\" (UID: \"11e6e9b8-0fb5-488a-8652-963b3c002f35\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411730-jhh9k" Dec 02 19:30:00 crc kubenswrapper[4792]: I1202 19:30:00.427215 4792 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-6j57m\" (UniqueName: \"kubernetes.io/projected/11e6e9b8-0fb5-488a-8652-963b3c002f35-kube-api-access-6j57m\") pod \"collect-profiles-29411730-jhh9k\" (UID: \"11e6e9b8-0fb5-488a-8652-963b3c002f35\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411730-jhh9k" Dec 02 19:30:00 crc kubenswrapper[4792]: I1202 19:30:00.427311 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/11e6e9b8-0fb5-488a-8652-963b3c002f35-config-volume\") pod \"collect-profiles-29411730-jhh9k\" (UID: \"11e6e9b8-0fb5-488a-8652-963b3c002f35\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411730-jhh9k" Dec 02 19:30:00 crc kubenswrapper[4792]: I1202 19:30:00.428341 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/11e6e9b8-0fb5-488a-8652-963b3c002f35-config-volume\") pod \"collect-profiles-29411730-jhh9k\" (UID: \"11e6e9b8-0fb5-488a-8652-963b3c002f35\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411730-jhh9k" Dec 02 19:30:00 crc kubenswrapper[4792]: I1202 19:30:00.438660 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/11e6e9b8-0fb5-488a-8652-963b3c002f35-secret-volume\") pod \"collect-profiles-29411730-jhh9k\" (UID: \"11e6e9b8-0fb5-488a-8652-963b3c002f35\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411730-jhh9k" Dec 02 19:30:00 crc kubenswrapper[4792]: I1202 19:30:00.444885 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6j57m\" (UniqueName: \"kubernetes.io/projected/11e6e9b8-0fb5-488a-8652-963b3c002f35-kube-api-access-6j57m\") pod \"collect-profiles-29411730-jhh9k\" (UID: \"11e6e9b8-0fb5-488a-8652-963b3c002f35\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411730-jhh9k" Dec 02 19:30:00 crc kubenswrapper[4792]: I1202 19:30:00.527773 4792 util.go:30] "No sandbox for pod can be found. 
Dec 02 19:30:01 crc kubenswrapper[4792]: I1202 19:30:01.091821 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411730-jhh9k"]
Dec 02 19:30:01 crc kubenswrapper[4792]: I1202 19:30:01.332545 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411730-jhh9k" event={"ID":"11e6e9b8-0fb5-488a-8652-963b3c002f35","Type":"ContainerStarted","Data":"1d2265e49d25dd717a4c260181368892c8880154ef82bf072d7a92bc2c64afc6"}
Dec 02 19:30:01 crc kubenswrapper[4792]: I1202 19:30:01.332876 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411730-jhh9k" event={"ID":"11e6e9b8-0fb5-488a-8652-963b3c002f35","Type":"ContainerStarted","Data":"dfc6c497f55bffb5e3acca48fc4bb709aa2451985aab9b885aa83e145a00622d"}
Dec 02 19:30:01 crc kubenswrapper[4792]: I1202 19:30:01.357560 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29411730-jhh9k" podStartSLOduration=1.357517109 podStartE2EDuration="1.357517109s" podCreationTimestamp="2025-12-02 19:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 19:30:01.347021874 +0000 UTC m=+3232.119914202" watchObservedRunningTime="2025-12-02 19:30:01.357517109 +0000 UTC m=+3232.130409437"
Dec 02 19:30:02 crc kubenswrapper[4792]: I1202 19:30:02.347987 4792 generic.go:334] "Generic (PLEG): container finished" podID="11e6e9b8-0fb5-488a-8652-963b3c002f35" containerID="1d2265e49d25dd717a4c260181368892c8880154ef82bf072d7a92bc2c64afc6" exitCode=0
Dec 02 19:30:02 crc kubenswrapper[4792]: I1202 19:30:02.348060 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411730-jhh9k" event={"ID":"11e6e9b8-0fb5-488a-8652-963b3c002f35","Type":"ContainerDied","Data":"1d2265e49d25dd717a4c260181368892c8880154ef82bf072d7a92bc2c64afc6"}
Dec 02 19:30:03 crc kubenswrapper[4792]: I1202 19:30:03.802258 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411730-jhh9k"
Dec 02 19:30:03 crc kubenswrapper[4792]: I1202 19:30:03.901385 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/11e6e9b8-0fb5-488a-8652-963b3c002f35-secret-volume\") pod \"11e6e9b8-0fb5-488a-8652-963b3c002f35\" (UID: \"11e6e9b8-0fb5-488a-8652-963b3c002f35\") "
Dec 02 19:30:03 crc kubenswrapper[4792]: I1202 19:30:03.901547 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/11e6e9b8-0fb5-488a-8652-963b3c002f35-config-volume\") pod \"11e6e9b8-0fb5-488a-8652-963b3c002f35\" (UID: \"11e6e9b8-0fb5-488a-8652-963b3c002f35\") "
Dec 02 19:30:03 crc kubenswrapper[4792]: I1202 19:30:03.901849 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6j57m\" (UniqueName: \"kubernetes.io/projected/11e6e9b8-0fb5-488a-8652-963b3c002f35-kube-api-access-6j57m\") pod \"11e6e9b8-0fb5-488a-8652-963b3c002f35\" (UID: \"11e6e9b8-0fb5-488a-8652-963b3c002f35\") "
Dec 02 19:30:03 crc kubenswrapper[4792]: I1202 19:30:03.902474 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/11e6e9b8-0fb5-488a-8652-963b3c002f35-config-volume" (OuterVolumeSpecName: "config-volume") pod "11e6e9b8-0fb5-488a-8652-963b3c002f35" (UID: "11e6e9b8-0fb5-488a-8652-963b3c002f35"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 19:30:03 crc kubenswrapper[4792]: I1202 19:30:03.909258 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11e6e9b8-0fb5-488a-8652-963b3c002f35-kube-api-access-6j57m" (OuterVolumeSpecName: "kube-api-access-6j57m") pod "11e6e9b8-0fb5-488a-8652-963b3c002f35" (UID: "11e6e9b8-0fb5-488a-8652-963b3c002f35"). InnerVolumeSpecName "kube-api-access-6j57m". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 19:30:03 crc kubenswrapper[4792]: I1202 19:30:03.911957 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11e6e9b8-0fb5-488a-8652-963b3c002f35-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "11e6e9b8-0fb5-488a-8652-963b3c002f35" (UID: "11e6e9b8-0fb5-488a-8652-963b3c002f35"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:30:04 crc kubenswrapper[4792]: I1202 19:30:04.003827 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6j57m\" (UniqueName: \"kubernetes.io/projected/11e6e9b8-0fb5-488a-8652-963b3c002f35-kube-api-access-6j57m\") on node \"crc\" DevicePath \"\"" Dec 02 19:30:04 crc kubenswrapper[4792]: I1202 19:30:04.004050 4792 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/11e6e9b8-0fb5-488a-8652-963b3c002f35-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 02 19:30:04 crc kubenswrapper[4792]: I1202 19:30:04.004119 4792 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/11e6e9b8-0fb5-488a-8652-963b3c002f35-config-volume\") on node \"crc\" DevicePath \"\"" Dec 02 19:30:04 crc kubenswrapper[4792]: I1202 19:30:04.381969 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411730-jhh9k" event={"ID":"11e6e9b8-0fb5-488a-8652-963b3c002f35","Type":"ContainerDied","Data":"dfc6c497f55bffb5e3acca48fc4bb709aa2451985aab9b885aa83e145a00622d"} Dec 02 19:30:04 crc kubenswrapper[4792]: I1202 19:30:04.382072 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dfc6c497f55bffb5e3acca48fc4bb709aa2451985aab9b885aa83e145a00622d" Dec 02 19:30:04 crc kubenswrapper[4792]: I1202 19:30:04.382187 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411730-jhh9k" Dec 02 19:30:04 crc kubenswrapper[4792]: I1202 19:30:04.447345 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411685-nptp5"] Dec 02 19:30:04 crc kubenswrapper[4792]: I1202 19:30:04.482612 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411685-nptp5"] Dec 02 19:30:04 crc kubenswrapper[4792]: I1202 19:30:04.540558 4792 scope.go:117] "RemoveContainer" containerID="d10baa4abdd1b48b5e4fd474432bcd290c613ab0dd069024ee83a680d770775a" Dec 02 19:30:04 crc kubenswrapper[4792]: E1202 19:30:04.541179 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:30:05 crc kubenswrapper[4792]: I1202 19:30:05.558831 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a1557aa-b284-4da6-85e5-40405c02f4e9" path="/var/lib/kubelet/pods/2a1557aa-b284-4da6-85e5-40405c02f4e9/volumes" Dec 02 19:30:17 crc kubenswrapper[4792]: I1202 19:30:17.758612 4792 scope.go:117] "RemoveContainer" containerID="d10baa4abdd1b48b5e4fd474432bcd290c613ab0dd069024ee83a680d770775a" Dec 02 19:30:17 crc kubenswrapper[4792]: E1202 19:30:17.759826 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:30:30 crc kubenswrapper[4792]: I1202 19:30:30.540485 4792 scope.go:117] "RemoveContainer" containerID="d10baa4abdd1b48b5e4fd474432bcd290c613ab0dd069024ee83a680d770775a" Dec 02 19:30:30 crc kubenswrapper[4792]: E1202 19:30:30.541372 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:30:38 crc kubenswrapper[4792]: I1202 19:30:38.352107 4792 scope.go:117] "RemoveContainer" containerID="1f46444b1b93ad5fd5c3b2fc1e4ded3d3e973568f49cf1d9d555c8179d3f1168" Dec 02 19:30:42 crc kubenswrapper[4792]: I1202 19:30:42.541269 4792 scope.go:117] "RemoveContainer" containerID="d10baa4abdd1b48b5e4fd474432bcd290c613ab0dd069024ee83a680d770775a" Dec 02 19:30:42 crc kubenswrapper[4792]: I1202 19:30:42.798937 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" event={"ID":"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f","Type":"ContainerStarted","Data":"1316731db97579edd6c406f74dd573b0d2f7753051feb84c8996194fbf2c5381"} Dec 02 19:33:08 crc kubenswrapper[4792]: I1202 19:33:08.082879 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 19:33:08 crc kubenswrapper[4792]: I1202 19:33:08.083565 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 19:33:38 crc kubenswrapper[4792]: I1202 19:33:38.080960 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 19:33:38 crc kubenswrapper[4792]: I1202 19:33:38.081415 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 19:34:08 crc kubenswrapper[4792]: I1202 19:34:08.082362 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 19:34:08 crc kubenswrapper[4792]: I1202 19:34:08.083704 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" 
podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 19:34:08 crc kubenswrapper[4792]: I1202 19:34:08.083777 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" Dec 02 19:34:08 crc kubenswrapper[4792]: I1202 19:34:08.085263 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1316731db97579edd6c406f74dd573b0d2f7753051feb84c8996194fbf2c5381"} pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 19:34:08 crc kubenswrapper[4792]: I1202 19:34:08.085403 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" containerID="cri-o://1316731db97579edd6c406f74dd573b0d2f7753051feb84c8996194fbf2c5381" gracePeriod=600 Dec 02 19:34:09 crc kubenswrapper[4792]: I1202 19:34:09.020387 4792 generic.go:334] "Generic (PLEG): container finished" podID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerID="1316731db97579edd6c406f74dd573b0d2f7753051feb84c8996194fbf2c5381" exitCode=0 Dec 02 19:34:09 crc kubenswrapper[4792]: I1202 19:34:09.020589 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" event={"ID":"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f","Type":"ContainerDied","Data":"1316731db97579edd6c406f74dd573b0d2f7753051feb84c8996194fbf2c5381"} Dec 02 19:34:09 crc kubenswrapper[4792]: I1202 19:34:09.020941 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" event={"ID":"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f","Type":"ContainerStarted","Data":"0303b4a8c6955524b52ff6e9f01a7697e4973f95ad9a7c905535eb8cc7c64b49"} Dec 02 19:34:09 crc kubenswrapper[4792]: I1202 19:34:09.020965 4792 scope.go:117] "RemoveContainer" containerID="d10baa4abdd1b48b5e4fd474432bcd290c613ab0dd069024ee83a680d770775a" Dec 02 19:34:46 crc kubenswrapper[4792]: I1202 19:34:46.423644 4792 generic.go:334] "Generic (PLEG): container finished" podID="22c610ff-ae47-48ac-8fea-c3ab17f23106" containerID="abf98db9b5e0eba946d05b230d4940f41b111de60f19b839e7f488296d6d94b9" exitCode=0 Dec 02 19:34:46 crc kubenswrapper[4792]: I1202 19:34:46.423745 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"22c610ff-ae47-48ac-8fea-c3ab17f23106","Type":"ContainerDied","Data":"abf98db9b5e0eba946d05b230d4940f41b111de60f19b839e7f488296d6d94b9"} Dec 02 19:34:48 crc kubenswrapper[4792]: I1202 19:34:48.025354 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 02 19:34:48 crc kubenswrapper[4792]: I1202 19:34:48.076269 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/22c610ff-ae47-48ac-8fea-c3ab17f23106-openstack-config-secret\") pod \"22c610ff-ae47-48ac-8fea-c3ab17f23106\" (UID: \"22c610ff-ae47-48ac-8fea-c3ab17f23106\") " Dec 02 19:34:48 crc kubenswrapper[4792]: I1202 19:34:48.076358 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j9lxr\" (UniqueName: \"kubernetes.io/projected/22c610ff-ae47-48ac-8fea-c3ab17f23106-kube-api-access-j9lxr\") pod \"22c610ff-ae47-48ac-8fea-c3ab17f23106\" (UID: \"22c610ff-ae47-48ac-8fea-c3ab17f23106\") " Dec 02 19:34:48 crc kubenswrapper[4792]: I1202 19:34:48.076421 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/22c610ff-ae47-48ac-8fea-c3ab17f23106-config-data\") pod \"22c610ff-ae47-48ac-8fea-c3ab17f23106\" (UID: \"22c610ff-ae47-48ac-8fea-c3ab17f23106\") " Dec 02 19:34:48 crc kubenswrapper[4792]: I1202 19:34:48.076612 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/22c610ff-ae47-48ac-8fea-c3ab17f23106-ssh-key\") pod \"22c610ff-ae47-48ac-8fea-c3ab17f23106\" (UID: \"22c610ff-ae47-48ac-8fea-c3ab17f23106\") " Dec 02 19:34:48 crc kubenswrapper[4792]: I1202 19:34:48.076661 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/22c610ff-ae47-48ac-8fea-c3ab17f23106-test-operator-ephemeral-temporary\") pod \"22c610ff-ae47-48ac-8fea-c3ab17f23106\" (UID: \"22c610ff-ae47-48ac-8fea-c3ab17f23106\") " Dec 02 19:34:48 crc kubenswrapper[4792]: I1202 19:34:48.076699 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/22c610ff-ae47-48ac-8fea-c3ab17f23106-test-operator-ephemeral-workdir\") pod \"22c610ff-ae47-48ac-8fea-c3ab17f23106\" (UID: \"22c610ff-ae47-48ac-8fea-c3ab17f23106\") " Dec 02 19:34:48 crc kubenswrapper[4792]: I1202 19:34:48.076755 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"22c610ff-ae47-48ac-8fea-c3ab17f23106\" (UID: \"22c610ff-ae47-48ac-8fea-c3ab17f23106\") " Dec 02 19:34:48 crc kubenswrapper[4792]: I1202 19:34:48.076851 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/22c610ff-ae47-48ac-8fea-c3ab17f23106-openstack-config\") pod \"22c610ff-ae47-48ac-8fea-c3ab17f23106\" (UID: \"22c610ff-ae47-48ac-8fea-c3ab17f23106\") " Dec 02 19:34:48 crc kubenswrapper[4792]: I1202 19:34:48.076939 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/22c610ff-ae47-48ac-8fea-c3ab17f23106-ca-certs\") pod \"22c610ff-ae47-48ac-8fea-c3ab17f23106\" (UID: \"22c610ff-ae47-48ac-8fea-c3ab17f23106\") " Dec 02 19:34:48 crc kubenswrapper[4792]: I1202 19:34:48.081012 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c610ff-ae47-48ac-8fea-c3ab17f23106-config-data" (OuterVolumeSpecName: "config-data") pod 
"22c610ff-ae47-48ac-8fea-c3ab17f23106" (UID: "22c610ff-ae47-48ac-8fea-c3ab17f23106"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 19:34:48 crc kubenswrapper[4792]: I1202 19:34:48.081832 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/22c610ff-ae47-48ac-8fea-c3ab17f23106-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "22c610ff-ae47-48ac-8fea-c3ab17f23106" (UID: "22c610ff-ae47-48ac-8fea-c3ab17f23106"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:34:48 crc kubenswrapper[4792]: I1202 19:34:48.102342 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "test-operator-logs") pod "22c610ff-ae47-48ac-8fea-c3ab17f23106" (UID: "22c610ff-ae47-48ac-8fea-c3ab17f23106"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 02 19:34:48 crc kubenswrapper[4792]: I1202 19:34:48.103714 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c610ff-ae47-48ac-8fea-c3ab17f23106-kube-api-access-j9lxr" (OuterVolumeSpecName: "kube-api-access-j9lxr") pod "22c610ff-ae47-48ac-8fea-c3ab17f23106" (UID: "22c610ff-ae47-48ac-8fea-c3ab17f23106"). InnerVolumeSpecName "kube-api-access-j9lxr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:34:48 crc kubenswrapper[4792]: I1202 19:34:48.114985 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c610ff-ae47-48ac-8fea-c3ab17f23106-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "22c610ff-ae47-48ac-8fea-c3ab17f23106" (UID: "22c610ff-ae47-48ac-8fea-c3ab17f23106"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:34:48 crc kubenswrapper[4792]: I1202 19:34:48.115785 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c610ff-ae47-48ac-8fea-c3ab17f23106-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "22c610ff-ae47-48ac-8fea-c3ab17f23106" (UID: "22c610ff-ae47-48ac-8fea-c3ab17f23106"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:34:48 crc kubenswrapper[4792]: I1202 19:34:48.143119 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c610ff-ae47-48ac-8fea-c3ab17f23106-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "22c610ff-ae47-48ac-8fea-c3ab17f23106" (UID: "22c610ff-ae47-48ac-8fea-c3ab17f23106"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 19:34:48 crc kubenswrapper[4792]: I1202 19:34:48.146840 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c610ff-ae47-48ac-8fea-c3ab17f23106-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "22c610ff-ae47-48ac-8fea-c3ab17f23106" (UID: "22c610ff-ae47-48ac-8fea-c3ab17f23106"). InnerVolumeSpecName "openstack-config-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:34:48 crc kubenswrapper[4792]: I1202 19:34:48.179597 4792 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/22c610ff-ae47-48ac-8fea-c3ab17f23106-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 02 19:34:48 crc kubenswrapper[4792]: I1202 19:34:48.179629 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j9lxr\" (UniqueName: \"kubernetes.io/projected/22c610ff-ae47-48ac-8fea-c3ab17f23106-kube-api-access-j9lxr\") on node \"crc\" DevicePath \"\"" Dec 02 19:34:48 crc kubenswrapper[4792]: I1202 19:34:48.179639 4792 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/22c610ff-ae47-48ac-8fea-c3ab17f23106-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 19:34:48 crc kubenswrapper[4792]: I1202 19:34:48.179649 4792 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/22c610ff-ae47-48ac-8fea-c3ab17f23106-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 19:34:48 crc kubenswrapper[4792]: I1202 19:34:48.179658 4792 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/22c610ff-ae47-48ac-8fea-c3ab17f23106-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Dec 02 19:34:48 crc kubenswrapper[4792]: I1202 19:34:48.179682 4792 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Dec 02 19:34:48 crc kubenswrapper[4792]: I1202 19:34:48.179691 4792 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/22c610ff-ae47-48ac-8fea-c3ab17f23106-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 02 19:34:48 crc kubenswrapper[4792]: I1202 19:34:48.179701 4792 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/22c610ff-ae47-48ac-8fea-c3ab17f23106-ca-certs\") on node \"crc\" DevicePath \"\"" Dec 02 19:34:48 crc kubenswrapper[4792]: I1202 19:34:48.202039 4792 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Dec 02 19:34:48 crc kubenswrapper[4792]: I1202 19:34:48.282813 4792 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Dec 02 19:34:48 crc kubenswrapper[4792]: I1202 19:34:48.446023 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"22c610ff-ae47-48ac-8fea-c3ab17f23106","Type":"ContainerDied","Data":"1eeb87c1b30573491fd8daf540f8d309fe2805ef10cfb33caf7ccc4d93009407"} Dec 02 19:34:48 crc kubenswrapper[4792]: I1202 19:34:48.446401 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1eeb87c1b30573491fd8daf540f8d309fe2805ef10cfb33caf7ccc4d93009407" Dec 02 19:34:48 crc kubenswrapper[4792]: I1202 19:34:48.446091 4792 util.go:48] "No ready sandbox for pod can be found. 
Dec 02 19:34:48 crc kubenswrapper[4792]: I1202 19:34:48.516069 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/22c610ff-ae47-48ac-8fea-c3ab17f23106-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "22c610ff-ae47-48ac-8fea-c3ab17f23106" (UID: "22c610ff-ae47-48ac-8fea-c3ab17f23106"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 19:34:48 crc kubenswrapper[4792]: I1202 19:34:48.588918 4792 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/22c610ff-ae47-48ac-8fea-c3ab17f23106-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\""
Dec 02 19:34:50 crc kubenswrapper[4792]: I1202 19:34:50.587579 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"]
Dec 02 19:34:50 crc kubenswrapper[4792]: E1202 19:34:50.588459 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22c610ff-ae47-48ac-8fea-c3ab17f23106" containerName="tempest-tests-tempest-tests-runner"
Dec 02 19:34:50 crc kubenswrapper[4792]: I1202 19:34:50.588475 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="22c610ff-ae47-48ac-8fea-c3ab17f23106" containerName="tempest-tests-tempest-tests-runner"
Dec 02 19:34:50 crc kubenswrapper[4792]: E1202 19:34:50.588497 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11e6e9b8-0fb5-488a-8652-963b3c002f35" containerName="collect-profiles"
Dec 02 19:34:50 crc kubenswrapper[4792]: I1202 19:34:50.588505 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="11e6e9b8-0fb5-488a-8652-963b3c002f35" containerName="collect-profiles"
Dec 02 19:34:50 crc kubenswrapper[4792]: I1202 19:34:50.588790 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="11e6e9b8-0fb5-488a-8652-963b3c002f35" containerName="collect-profiles"
Dec 02 19:34:50 crc kubenswrapper[4792]: I1202 19:34:50.588818 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="22c610ff-ae47-48ac-8fea-c3ab17f23106" containerName="tempest-tests-tempest-tests-runner"
Dec 02 19:34:50 crc kubenswrapper[4792]: I1202 19:34:50.590007 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 02 19:34:50 crc kubenswrapper[4792]: I1202 19:34:50.592454 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-5k6l8"
Dec 02 19:34:50 crc kubenswrapper[4792]: I1202 19:34:50.596925 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"]
Dec 02 19:34:50 crc kubenswrapper[4792]: I1202 19:34:50.742027 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qbww4\" (UniqueName: \"kubernetes.io/projected/9484b035-7d4b-487e-b070-f935fb55389e-kube-api-access-qbww4\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"9484b035-7d4b-487e-b070-f935fb55389e\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 02 19:34:50 crc kubenswrapper[4792]: I1202 19:34:50.743210 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"9484b035-7d4b-487e-b070-f935fb55389e\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 02 19:34:50 crc kubenswrapper[4792]: I1202 19:34:50.845744 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qbww4\" (UniqueName: \"kubernetes.io/projected/9484b035-7d4b-487e-b070-f935fb55389e-kube-api-access-qbww4\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"9484b035-7d4b-487e-b070-f935fb55389e\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 02 19:34:50 crc kubenswrapper[4792]: I1202 19:34:50.845918 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"9484b035-7d4b-487e-b070-f935fb55389e\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 02 19:34:50 crc kubenswrapper[4792]: I1202 19:34:50.846403 4792 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"9484b035-7d4b-487e-b070-f935fb55389e\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 02 19:34:50 crc kubenswrapper[4792]: I1202 19:34:50.868796 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qbww4\" (UniqueName: \"kubernetes.io/projected/9484b035-7d4b-487e-b070-f935fb55389e-kube-api-access-qbww4\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"9484b035-7d4b-487e-b070-f935fb55389e\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 02 19:34:50 crc kubenswrapper[4792]: I1202 19:34:50.876244 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"9484b035-7d4b-487e-b070-f935fb55389e\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 02 19:34:50 crc kubenswrapper[4792]: I1202 19:34:50.907720 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 02 19:34:51 crc kubenswrapper[4792]: I1202 19:34:51.391973 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"]
Dec 02 19:34:51 crc kubenswrapper[4792]: I1202 19:34:51.398550 4792 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 02 19:34:51 crc kubenswrapper[4792]: I1202 19:34:51.476398 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"9484b035-7d4b-487e-b070-f935fb55389e","Type":"ContainerStarted","Data":"f3b5a766732296ab782b590a454e576e2e9925dcae5b99c15197baecac8fe009"}
Dec 02 19:34:53 crc kubenswrapper[4792]: I1202 19:34:53.499046 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"9484b035-7d4b-487e-b070-f935fb55389e","Type":"ContainerStarted","Data":"75eabd7b48bfef7373ecdf82ac2d56982c2989012561b31e23ca6cb75b64c74a"}
Dec 02 19:34:53 crc kubenswrapper[4792]: I1202 19:34:53.517791 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=2.609116927 podStartE2EDuration="3.517772219s" podCreationTimestamp="2025-12-02 19:34:50 +0000 UTC" firstStartedPulling="2025-12-02 19:34:51.398279976 +0000 UTC m=+3522.171172314" lastFinishedPulling="2025-12-02 19:34:52.306935278 +0000 UTC m=+3523.079827606" observedRunningTime="2025-12-02 19:34:53.510024316 +0000 UTC m=+3524.282916654" watchObservedRunningTime="2025-12-02 19:34:53.517772219 +0000 UTC m=+3524.290664547"
Dec 02 19:35:12 crc kubenswrapper[4792]: I1202 19:35:12.052808 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-5gtvr"]
Dec 02 19:35:12 crc kubenswrapper[4792]: I1202 19:35:12.056087 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5gtvr"
Dec 02 19:35:12 crc kubenswrapper[4792]: I1202 19:35:12.064862 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5gtvr"]
Dec 02 19:35:12 crc kubenswrapper[4792]: I1202 19:35:12.142898 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d2f8cb1-f92b-4e8d-865f-b65005f4e27a-utilities\") pod \"redhat-marketplace-5gtvr\" (UID: \"3d2f8cb1-f92b-4e8d-865f-b65005f4e27a\") " pod="openshift-marketplace/redhat-marketplace-5gtvr"
Dec 02 19:35:12 crc kubenswrapper[4792]: I1202 19:35:12.143051 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d2f8cb1-f92b-4e8d-865f-b65005f4e27a-catalog-content\") pod \"redhat-marketplace-5gtvr\" (UID: \"3d2f8cb1-f92b-4e8d-865f-b65005f4e27a\") " pod="openshift-marketplace/redhat-marketplace-5gtvr"
Dec 02 19:35:12 crc kubenswrapper[4792]: I1202 19:35:12.143242 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zpg9t\" (UniqueName: \"kubernetes.io/projected/3d2f8cb1-f92b-4e8d-865f-b65005f4e27a-kube-api-access-zpg9t\") pod \"redhat-marketplace-5gtvr\" (UID: \"3d2f8cb1-f92b-4e8d-865f-b65005f4e27a\") " pod="openshift-marketplace/redhat-marketplace-5gtvr"
Dec 02 19:35:12 crc kubenswrapper[4792]: I1202 19:35:12.244961 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zpg9t\" (UniqueName: \"kubernetes.io/projected/3d2f8cb1-f92b-4e8d-865f-b65005f4e27a-kube-api-access-zpg9t\") pod \"redhat-marketplace-5gtvr\" (UID: \"3d2f8cb1-f92b-4e8d-865f-b65005f4e27a\") " pod="openshift-marketplace/redhat-marketplace-5gtvr"
Dec 02 19:35:12 crc kubenswrapper[4792]: I1202 19:35:12.245029 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d2f8cb1-f92b-4e8d-865f-b65005f4e27a-utilities\") pod \"redhat-marketplace-5gtvr\" (UID: \"3d2f8cb1-f92b-4e8d-865f-b65005f4e27a\") " pod="openshift-marketplace/redhat-marketplace-5gtvr"
Dec 02 19:35:12 crc kubenswrapper[4792]: I1202 19:35:12.245135 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d2f8cb1-f92b-4e8d-865f-b65005f4e27a-catalog-content\") pod \"redhat-marketplace-5gtvr\" (UID: \"3d2f8cb1-f92b-4e8d-865f-b65005f4e27a\") " pod="openshift-marketplace/redhat-marketplace-5gtvr"
Dec 02 19:35:12 crc kubenswrapper[4792]: I1202 19:35:12.245778 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d2f8cb1-f92b-4e8d-865f-b65005f4e27a-utilities\") pod \"redhat-marketplace-5gtvr\" (UID: \"3d2f8cb1-f92b-4e8d-865f-b65005f4e27a\") " pod="openshift-marketplace/redhat-marketplace-5gtvr"
Dec 02 19:35:12 crc kubenswrapper[4792]: I1202 19:35:12.245797 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d2f8cb1-f92b-4e8d-865f-b65005f4e27a-catalog-content\") pod \"redhat-marketplace-5gtvr\" (UID: \"3d2f8cb1-f92b-4e8d-865f-b65005f4e27a\") " pod="openshift-marketplace/redhat-marketplace-5gtvr"
Dec 02 19:35:12 crc kubenswrapper[4792]: I1202 19:35:12.273667 4792 operation_generator.go:637] "MountVolume.SetUp
succeeded for volume \"kube-api-access-zpg9t\" (UniqueName: \"kubernetes.io/projected/3d2f8cb1-f92b-4e8d-865f-b65005f4e27a-kube-api-access-zpg9t\") pod \"redhat-marketplace-5gtvr\" (UID: \"3d2f8cb1-f92b-4e8d-865f-b65005f4e27a\") " pod="openshift-marketplace/redhat-marketplace-5gtvr" Dec 02 19:35:12 crc kubenswrapper[4792]: I1202 19:35:12.376154 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5gtvr" Dec 02 19:35:12 crc kubenswrapper[4792]: I1202 19:35:12.893785 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5gtvr"] Dec 02 19:35:13 crc kubenswrapper[4792]: I1202 19:35:13.734168 4792 generic.go:334] "Generic (PLEG): container finished" podID="3d2f8cb1-f92b-4e8d-865f-b65005f4e27a" containerID="badf9ac7122c6f2584b8aaac1670b5215a827a0824f927799d79e54d11dee34a" exitCode=0 Dec 02 19:35:13 crc kubenswrapper[4792]: I1202 19:35:13.734281 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5gtvr" event={"ID":"3d2f8cb1-f92b-4e8d-865f-b65005f4e27a","Type":"ContainerDied","Data":"badf9ac7122c6f2584b8aaac1670b5215a827a0824f927799d79e54d11dee34a"} Dec 02 19:35:13 crc kubenswrapper[4792]: I1202 19:35:13.734485 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5gtvr" event={"ID":"3d2f8cb1-f92b-4e8d-865f-b65005f4e27a","Type":"ContainerStarted","Data":"2039f4070e0be2ae96cc1b8efd988fe751db852ab9ed94b6b907685d8a97635c"} Dec 02 19:35:14 crc kubenswrapper[4792]: I1202 19:35:14.743942 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5gtvr" event={"ID":"3d2f8cb1-f92b-4e8d-865f-b65005f4e27a","Type":"ContainerStarted","Data":"c2732cb749fea40d8a5c581f0489058c971121089a1f084b50d1cae17684f694"} Dec 02 19:35:15 crc kubenswrapper[4792]: I1202 19:35:15.754067 4792 generic.go:334] "Generic (PLEG): container finished" podID="3d2f8cb1-f92b-4e8d-865f-b65005f4e27a" containerID="c2732cb749fea40d8a5c581f0489058c971121089a1f084b50d1cae17684f694" exitCode=0 Dec 02 19:35:15 crc kubenswrapper[4792]: I1202 19:35:15.754166 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5gtvr" event={"ID":"3d2f8cb1-f92b-4e8d-865f-b65005f4e27a","Type":"ContainerDied","Data":"c2732cb749fea40d8a5c581f0489058c971121089a1f084b50d1cae17684f694"} Dec 02 19:35:16 crc kubenswrapper[4792]: I1202 19:35:16.468720 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-85rz5/must-gather-hbkj7"] Dec 02 19:35:16 crc kubenswrapper[4792]: I1202 19:35:16.470788 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-85rz5/must-gather-hbkj7" Dec 02 19:35:16 crc kubenswrapper[4792]: I1202 19:35:16.475816 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-85rz5"/"openshift-service-ca.crt" Dec 02 19:35:16 crc kubenswrapper[4792]: I1202 19:35:16.475828 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-85rz5"/"default-dockercfg-9b8zj" Dec 02 19:35:16 crc kubenswrapper[4792]: I1202 19:35:16.475824 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-85rz5"/"kube-root-ca.crt" Dec 02 19:35:16 crc kubenswrapper[4792]: I1202 19:35:16.502539 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-85rz5/must-gather-hbkj7"] Dec 02 19:35:16 crc kubenswrapper[4792]: I1202 19:35:16.526692 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/6090e5ac-472b-4d4a-9a37-cfd91d7f1411-must-gather-output\") pod \"must-gather-hbkj7\" (UID: \"6090e5ac-472b-4d4a-9a37-cfd91d7f1411\") " pod="openshift-must-gather-85rz5/must-gather-hbkj7" Dec 02 19:35:16 crc kubenswrapper[4792]: I1202 19:35:16.526842 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k86mb\" (UniqueName: \"kubernetes.io/projected/6090e5ac-472b-4d4a-9a37-cfd91d7f1411-kube-api-access-k86mb\") pod \"must-gather-hbkj7\" (UID: \"6090e5ac-472b-4d4a-9a37-cfd91d7f1411\") " pod="openshift-must-gather-85rz5/must-gather-hbkj7" Dec 02 19:35:16 crc kubenswrapper[4792]: I1202 19:35:16.628123 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/6090e5ac-472b-4d4a-9a37-cfd91d7f1411-must-gather-output\") pod \"must-gather-hbkj7\" (UID: \"6090e5ac-472b-4d4a-9a37-cfd91d7f1411\") " pod="openshift-must-gather-85rz5/must-gather-hbkj7" Dec 02 19:35:16 crc kubenswrapper[4792]: I1202 19:35:16.628297 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k86mb\" (UniqueName: \"kubernetes.io/projected/6090e5ac-472b-4d4a-9a37-cfd91d7f1411-kube-api-access-k86mb\") pod \"must-gather-hbkj7\" (UID: \"6090e5ac-472b-4d4a-9a37-cfd91d7f1411\") " pod="openshift-must-gather-85rz5/must-gather-hbkj7" Dec 02 19:35:16 crc kubenswrapper[4792]: I1202 19:35:16.628551 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/6090e5ac-472b-4d4a-9a37-cfd91d7f1411-must-gather-output\") pod \"must-gather-hbkj7\" (UID: \"6090e5ac-472b-4d4a-9a37-cfd91d7f1411\") " pod="openshift-must-gather-85rz5/must-gather-hbkj7" Dec 02 19:35:16 crc kubenswrapper[4792]: I1202 19:35:16.648936 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k86mb\" (UniqueName: \"kubernetes.io/projected/6090e5ac-472b-4d4a-9a37-cfd91d7f1411-kube-api-access-k86mb\") pod \"must-gather-hbkj7\" (UID: \"6090e5ac-472b-4d4a-9a37-cfd91d7f1411\") " pod="openshift-must-gather-85rz5/must-gather-hbkj7" Dec 02 19:35:16 crc kubenswrapper[4792]: I1202 19:35:16.780642 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5gtvr" event={"ID":"3d2f8cb1-f92b-4e8d-865f-b65005f4e27a","Type":"ContainerStarted","Data":"b97ea401d26d20d7c9d6e956aba76fd971eca08b95723337c6a771609f1b9d8e"} Dec 02 19:35:16 crc 
kubenswrapper[4792]: I1202 19:35:16.785859 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-85rz5/must-gather-hbkj7" Dec 02 19:35:16 crc kubenswrapper[4792]: I1202 19:35:16.816687 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-5gtvr" podStartSLOduration=2.366947025 podStartE2EDuration="4.816670591s" podCreationTimestamp="2025-12-02 19:35:12 +0000 UTC" firstStartedPulling="2025-12-02 19:35:13.736285788 +0000 UTC m=+3544.509178116" lastFinishedPulling="2025-12-02 19:35:16.186009354 +0000 UTC m=+3546.958901682" observedRunningTime="2025-12-02 19:35:16.800883767 +0000 UTC m=+3547.573776105" watchObservedRunningTime="2025-12-02 19:35:16.816670591 +0000 UTC m=+3547.589562919" Dec 02 19:35:17 crc kubenswrapper[4792]: I1202 19:35:17.551667 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-85rz5/must-gather-hbkj7"] Dec 02 19:35:17 crc kubenswrapper[4792]: I1202 19:35:17.790721 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-85rz5/must-gather-hbkj7" event={"ID":"6090e5ac-472b-4d4a-9a37-cfd91d7f1411","Type":"ContainerStarted","Data":"7b3cb322f1f3b6d77306a173ed1dd5e128820e8e40a0c3638c65dda3766e3135"} Dec 02 19:35:22 crc kubenswrapper[4792]: I1202 19:35:22.377332 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-5gtvr" Dec 02 19:35:22 crc kubenswrapper[4792]: I1202 19:35:22.378052 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-5gtvr" Dec 02 19:35:22 crc kubenswrapper[4792]: I1202 19:35:22.434970 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-5gtvr" Dec 02 19:35:22 crc kubenswrapper[4792]: I1202 19:35:22.901061 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-5gtvr" Dec 02 19:35:22 crc kubenswrapper[4792]: I1202 19:35:22.979887 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5gtvr"] Dec 02 19:35:23 crc kubenswrapper[4792]: I1202 19:35:23.865851 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-85rz5/must-gather-hbkj7" event={"ID":"6090e5ac-472b-4d4a-9a37-cfd91d7f1411","Type":"ContainerStarted","Data":"235abfca656e2dfaa69e0da18253973ae054cd541fb3ad48526e757542eb98f2"} Dec 02 19:35:23 crc kubenswrapper[4792]: I1202 19:35:23.866149 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-85rz5/must-gather-hbkj7" event={"ID":"6090e5ac-472b-4d4a-9a37-cfd91d7f1411","Type":"ContainerStarted","Data":"bc48bf6022c8e881d6abccd4f3f772438b851d194b067caad1cb9d089d265b54"} Dec 02 19:35:23 crc kubenswrapper[4792]: I1202 19:35:23.881506 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-85rz5/must-gather-hbkj7" podStartSLOduration=2.463145918 podStartE2EDuration="7.881488518s" podCreationTimestamp="2025-12-02 19:35:16 +0000 UTC" firstStartedPulling="2025-12-02 19:35:17.548475158 +0000 UTC m=+3548.321367486" lastFinishedPulling="2025-12-02 19:35:22.966817748 +0000 UTC m=+3553.739710086" observedRunningTime="2025-12-02 19:35:23.879012383 +0000 UTC m=+3554.651904711" watchObservedRunningTime="2025-12-02 19:35:23.881488518 +0000 UTC m=+3554.654380856" Dec 02 19:35:24 crc kubenswrapper[4792]: I1202 
19:35:24.878960 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-5gtvr" podUID="3d2f8cb1-f92b-4e8d-865f-b65005f4e27a" containerName="registry-server" containerID="cri-o://b97ea401d26d20d7c9d6e956aba76fd971eca08b95723337c6a771609f1b9d8e" gracePeriod=2 Dec 02 19:35:25 crc kubenswrapper[4792]: I1202 19:35:25.675586 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5gtvr" Dec 02 19:35:25 crc kubenswrapper[4792]: I1202 19:35:25.795883 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d2f8cb1-f92b-4e8d-865f-b65005f4e27a-utilities\") pod \"3d2f8cb1-f92b-4e8d-865f-b65005f4e27a\" (UID: \"3d2f8cb1-f92b-4e8d-865f-b65005f4e27a\") " Dec 02 19:35:25 crc kubenswrapper[4792]: I1202 19:35:25.796129 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zpg9t\" (UniqueName: \"kubernetes.io/projected/3d2f8cb1-f92b-4e8d-865f-b65005f4e27a-kube-api-access-zpg9t\") pod \"3d2f8cb1-f92b-4e8d-865f-b65005f4e27a\" (UID: \"3d2f8cb1-f92b-4e8d-865f-b65005f4e27a\") " Dec 02 19:35:25 crc kubenswrapper[4792]: I1202 19:35:25.796175 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d2f8cb1-f92b-4e8d-865f-b65005f4e27a-catalog-content\") pod \"3d2f8cb1-f92b-4e8d-865f-b65005f4e27a\" (UID: \"3d2f8cb1-f92b-4e8d-865f-b65005f4e27a\") " Dec 02 19:35:25 crc kubenswrapper[4792]: I1202 19:35:25.797212 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d2f8cb1-f92b-4e8d-865f-b65005f4e27a-utilities" (OuterVolumeSpecName: "utilities") pod "3d2f8cb1-f92b-4e8d-865f-b65005f4e27a" (UID: "3d2f8cb1-f92b-4e8d-865f-b65005f4e27a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:35:25 crc kubenswrapper[4792]: I1202 19:35:25.803058 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d2f8cb1-f92b-4e8d-865f-b65005f4e27a-kube-api-access-zpg9t" (OuterVolumeSpecName: "kube-api-access-zpg9t") pod "3d2f8cb1-f92b-4e8d-865f-b65005f4e27a" (UID: "3d2f8cb1-f92b-4e8d-865f-b65005f4e27a"). InnerVolumeSpecName "kube-api-access-zpg9t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:35:25 crc kubenswrapper[4792]: I1202 19:35:25.817075 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d2f8cb1-f92b-4e8d-865f-b65005f4e27a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3d2f8cb1-f92b-4e8d-865f-b65005f4e27a" (UID: "3d2f8cb1-f92b-4e8d-865f-b65005f4e27a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:35:25 crc kubenswrapper[4792]: I1202 19:35:25.890076 4792 generic.go:334] "Generic (PLEG): container finished" podID="3d2f8cb1-f92b-4e8d-865f-b65005f4e27a" containerID="b97ea401d26d20d7c9d6e956aba76fd971eca08b95723337c6a771609f1b9d8e" exitCode=0 Dec 02 19:35:25 crc kubenswrapper[4792]: I1202 19:35:25.890124 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5gtvr" Dec 02 19:35:25 crc kubenswrapper[4792]: I1202 19:35:25.890182 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5gtvr" event={"ID":"3d2f8cb1-f92b-4e8d-865f-b65005f4e27a","Type":"ContainerDied","Data":"b97ea401d26d20d7c9d6e956aba76fd971eca08b95723337c6a771609f1b9d8e"} Dec 02 19:35:25 crc kubenswrapper[4792]: I1202 19:35:25.890267 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5gtvr" event={"ID":"3d2f8cb1-f92b-4e8d-865f-b65005f4e27a","Type":"ContainerDied","Data":"2039f4070e0be2ae96cc1b8efd988fe751db852ab9ed94b6b907685d8a97635c"} Dec 02 19:35:25 crc kubenswrapper[4792]: I1202 19:35:25.890334 4792 scope.go:117] "RemoveContainer" containerID="b97ea401d26d20d7c9d6e956aba76fd971eca08b95723337c6a771609f1b9d8e" Dec 02 19:35:25 crc kubenswrapper[4792]: I1202 19:35:25.898616 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zpg9t\" (UniqueName: \"kubernetes.io/projected/3d2f8cb1-f92b-4e8d-865f-b65005f4e27a-kube-api-access-zpg9t\") on node \"crc\" DevicePath \"\"" Dec 02 19:35:25 crc kubenswrapper[4792]: I1202 19:35:25.898652 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d2f8cb1-f92b-4e8d-865f-b65005f4e27a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 19:35:25 crc kubenswrapper[4792]: I1202 19:35:25.898665 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d2f8cb1-f92b-4e8d-865f-b65005f4e27a-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 19:35:25 crc kubenswrapper[4792]: I1202 19:35:25.916733 4792 scope.go:117] "RemoveContainer" containerID="c2732cb749fea40d8a5c581f0489058c971121089a1f084b50d1cae17684f694" Dec 02 19:35:25 crc kubenswrapper[4792]: I1202 19:35:25.932858 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5gtvr"] Dec 02 19:35:25 crc kubenswrapper[4792]: I1202 19:35:25.945339 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-5gtvr"] Dec 02 19:35:25 crc kubenswrapper[4792]: I1202 19:35:25.947156 4792 scope.go:117] "RemoveContainer" containerID="badf9ac7122c6f2584b8aaac1670b5215a827a0824f927799d79e54d11dee34a" Dec 02 19:35:25 crc kubenswrapper[4792]: I1202 19:35:25.993641 4792 scope.go:117] "RemoveContainer" containerID="b97ea401d26d20d7c9d6e956aba76fd971eca08b95723337c6a771609f1b9d8e" Dec 02 19:35:25 crc kubenswrapper[4792]: E1202 19:35:25.994136 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b97ea401d26d20d7c9d6e956aba76fd971eca08b95723337c6a771609f1b9d8e\": container with ID starting with b97ea401d26d20d7c9d6e956aba76fd971eca08b95723337c6a771609f1b9d8e not found: ID does not exist" containerID="b97ea401d26d20d7c9d6e956aba76fd971eca08b95723337c6a771609f1b9d8e" Dec 02 19:35:25 crc kubenswrapper[4792]: I1202 19:35:25.994181 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b97ea401d26d20d7c9d6e956aba76fd971eca08b95723337c6a771609f1b9d8e"} err="failed to get container status \"b97ea401d26d20d7c9d6e956aba76fd971eca08b95723337c6a771609f1b9d8e\": rpc error: code = NotFound desc = could not find container \"b97ea401d26d20d7c9d6e956aba76fd971eca08b95723337c6a771609f1b9d8e\": container with ID starting with 
b97ea401d26d20d7c9d6e956aba76fd971eca08b95723337c6a771609f1b9d8e not found: ID does not exist" Dec 02 19:35:25 crc kubenswrapper[4792]: I1202 19:35:25.994219 4792 scope.go:117] "RemoveContainer" containerID="c2732cb749fea40d8a5c581f0489058c971121089a1f084b50d1cae17684f694" Dec 02 19:35:25 crc kubenswrapper[4792]: E1202 19:35:25.994628 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c2732cb749fea40d8a5c581f0489058c971121089a1f084b50d1cae17684f694\": container with ID starting with c2732cb749fea40d8a5c581f0489058c971121089a1f084b50d1cae17684f694 not found: ID does not exist" containerID="c2732cb749fea40d8a5c581f0489058c971121089a1f084b50d1cae17684f694" Dec 02 19:35:25 crc kubenswrapper[4792]: I1202 19:35:25.994680 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2732cb749fea40d8a5c581f0489058c971121089a1f084b50d1cae17684f694"} err="failed to get container status \"c2732cb749fea40d8a5c581f0489058c971121089a1f084b50d1cae17684f694\": rpc error: code = NotFound desc = could not find container \"c2732cb749fea40d8a5c581f0489058c971121089a1f084b50d1cae17684f694\": container with ID starting with c2732cb749fea40d8a5c581f0489058c971121089a1f084b50d1cae17684f694 not found: ID does not exist" Dec 02 19:35:25 crc kubenswrapper[4792]: I1202 19:35:25.994712 4792 scope.go:117] "RemoveContainer" containerID="badf9ac7122c6f2584b8aaac1670b5215a827a0824f927799d79e54d11dee34a" Dec 02 19:35:25 crc kubenswrapper[4792]: E1202 19:35:25.995004 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"badf9ac7122c6f2584b8aaac1670b5215a827a0824f927799d79e54d11dee34a\": container with ID starting with badf9ac7122c6f2584b8aaac1670b5215a827a0824f927799d79e54d11dee34a not found: ID does not exist" containerID="badf9ac7122c6f2584b8aaac1670b5215a827a0824f927799d79e54d11dee34a" Dec 02 19:35:25 crc kubenswrapper[4792]: I1202 19:35:25.995037 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"badf9ac7122c6f2584b8aaac1670b5215a827a0824f927799d79e54d11dee34a"} err="failed to get container status \"badf9ac7122c6f2584b8aaac1670b5215a827a0824f927799d79e54d11dee34a\": rpc error: code = NotFound desc = could not find container \"badf9ac7122c6f2584b8aaac1670b5215a827a0824f927799d79e54d11dee34a\": container with ID starting with badf9ac7122c6f2584b8aaac1670b5215a827a0824f927799d79e54d11dee34a not found: ID does not exist" Dec 02 19:35:27 crc kubenswrapper[4792]: I1202 19:35:27.451630 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-85rz5/crc-debug-lmb4q"] Dec 02 19:35:27 crc kubenswrapper[4792]: E1202 19:35:27.452226 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d2f8cb1-f92b-4e8d-865f-b65005f4e27a" containerName="extract-content" Dec 02 19:35:27 crc kubenswrapper[4792]: I1202 19:35:27.452238 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d2f8cb1-f92b-4e8d-865f-b65005f4e27a" containerName="extract-content" Dec 02 19:35:27 crc kubenswrapper[4792]: E1202 19:35:27.452256 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d2f8cb1-f92b-4e8d-865f-b65005f4e27a" containerName="extract-utilities" Dec 02 19:35:27 crc kubenswrapper[4792]: I1202 19:35:27.452261 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d2f8cb1-f92b-4e8d-865f-b65005f4e27a" containerName="extract-utilities" Dec 02 19:35:27 crc 
kubenswrapper[4792]: E1202 19:35:27.452287 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d2f8cb1-f92b-4e8d-865f-b65005f4e27a" containerName="registry-server" Dec 02 19:35:27 crc kubenswrapper[4792]: I1202 19:35:27.452294 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d2f8cb1-f92b-4e8d-865f-b65005f4e27a" containerName="registry-server" Dec 02 19:35:27 crc kubenswrapper[4792]: I1202 19:35:27.452476 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d2f8cb1-f92b-4e8d-865f-b65005f4e27a" containerName="registry-server" Dec 02 19:35:27 crc kubenswrapper[4792]: I1202 19:35:27.453164 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-85rz5/crc-debug-lmb4q" Dec 02 19:35:27 crc kubenswrapper[4792]: I1202 19:35:27.557818 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d2f8cb1-f92b-4e8d-865f-b65005f4e27a" path="/var/lib/kubelet/pods/3d2f8cb1-f92b-4e8d-865f-b65005f4e27a/volumes" Dec 02 19:35:27 crc kubenswrapper[4792]: I1202 19:35:27.636427 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x462v\" (UniqueName: \"kubernetes.io/projected/99be21e1-14dc-4541-bc54-9bc290f6ffca-kube-api-access-x462v\") pod \"crc-debug-lmb4q\" (UID: \"99be21e1-14dc-4541-bc54-9bc290f6ffca\") " pod="openshift-must-gather-85rz5/crc-debug-lmb4q" Dec 02 19:35:27 crc kubenswrapper[4792]: I1202 19:35:27.637014 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/99be21e1-14dc-4541-bc54-9bc290f6ffca-host\") pod \"crc-debug-lmb4q\" (UID: \"99be21e1-14dc-4541-bc54-9bc290f6ffca\") " pod="openshift-must-gather-85rz5/crc-debug-lmb4q" Dec 02 19:35:27 crc kubenswrapper[4792]: I1202 19:35:27.739666 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/99be21e1-14dc-4541-bc54-9bc290f6ffca-host\") pod \"crc-debug-lmb4q\" (UID: \"99be21e1-14dc-4541-bc54-9bc290f6ffca\") " pod="openshift-must-gather-85rz5/crc-debug-lmb4q" Dec 02 19:35:27 crc kubenswrapper[4792]: I1202 19:35:27.739762 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x462v\" (UniqueName: \"kubernetes.io/projected/99be21e1-14dc-4541-bc54-9bc290f6ffca-kube-api-access-x462v\") pod \"crc-debug-lmb4q\" (UID: \"99be21e1-14dc-4541-bc54-9bc290f6ffca\") " pod="openshift-must-gather-85rz5/crc-debug-lmb4q" Dec 02 19:35:27 crc kubenswrapper[4792]: I1202 19:35:27.740504 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/99be21e1-14dc-4541-bc54-9bc290f6ffca-host\") pod \"crc-debug-lmb4q\" (UID: \"99be21e1-14dc-4541-bc54-9bc290f6ffca\") " pod="openshift-must-gather-85rz5/crc-debug-lmb4q" Dec 02 19:35:27 crc kubenswrapper[4792]: I1202 19:35:27.775396 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x462v\" (UniqueName: \"kubernetes.io/projected/99be21e1-14dc-4541-bc54-9bc290f6ffca-kube-api-access-x462v\") pod \"crc-debug-lmb4q\" (UID: \"99be21e1-14dc-4541-bc54-9bc290f6ffca\") " pod="openshift-must-gather-85rz5/crc-debug-lmb4q" Dec 02 19:35:28 crc kubenswrapper[4792]: I1202 19:35:28.070052 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-85rz5/crc-debug-lmb4q" Dec 02 19:35:28 crc kubenswrapper[4792]: I1202 19:35:28.920625 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-85rz5/crc-debug-lmb4q" event={"ID":"99be21e1-14dc-4541-bc54-9bc290f6ffca","Type":"ContainerStarted","Data":"d3d778728c40e1d2ec11b65ae6c666236ce3e9646353c39a9116cfa03c78bdbc"} Dec 02 19:35:42 crc kubenswrapper[4792]: I1202 19:35:42.054160 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-85rz5/crc-debug-lmb4q" event={"ID":"99be21e1-14dc-4541-bc54-9bc290f6ffca","Type":"ContainerStarted","Data":"622c4d347d7d7aa711ea40da7063dce05b4d9f844778689d7781b9b5253138b4"} Dec 02 19:35:42 crc kubenswrapper[4792]: I1202 19:35:42.079151 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-85rz5/crc-debug-lmb4q" podStartSLOduration=2.353130317 podStartE2EDuration="15.079135739s" podCreationTimestamp="2025-12-02 19:35:27 +0000 UTC" firstStartedPulling="2025-12-02 19:35:28.111604191 +0000 UTC m=+3558.884496519" lastFinishedPulling="2025-12-02 19:35:40.837609613 +0000 UTC m=+3571.610501941" observedRunningTime="2025-12-02 19:35:42.073755798 +0000 UTC m=+3572.846648126" watchObservedRunningTime="2025-12-02 19:35:42.079135739 +0000 UTC m=+3572.852028067" Dec 02 19:36:08 crc kubenswrapper[4792]: I1202 19:36:08.082012 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 19:36:08 crc kubenswrapper[4792]: I1202 19:36:08.082566 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 19:36:21 crc kubenswrapper[4792]: I1202 19:36:21.486102 4792 generic.go:334] "Generic (PLEG): container finished" podID="99be21e1-14dc-4541-bc54-9bc290f6ffca" containerID="622c4d347d7d7aa711ea40da7063dce05b4d9f844778689d7781b9b5253138b4" exitCode=0 Dec 02 19:36:21 crc kubenswrapper[4792]: I1202 19:36:21.486189 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-85rz5/crc-debug-lmb4q" event={"ID":"99be21e1-14dc-4541-bc54-9bc290f6ffca","Type":"ContainerDied","Data":"622c4d347d7d7aa711ea40da7063dce05b4d9f844778689d7781b9b5253138b4"} Dec 02 19:36:22 crc kubenswrapper[4792]: I1202 19:36:22.602910 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-85rz5/crc-debug-lmb4q" Dec 02 19:36:22 crc kubenswrapper[4792]: I1202 19:36:22.640145 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-85rz5/crc-debug-lmb4q"] Dec 02 19:36:22 crc kubenswrapper[4792]: I1202 19:36:22.650276 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-85rz5/crc-debug-lmb4q"] Dec 02 19:36:22 crc kubenswrapper[4792]: I1202 19:36:22.690983 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x462v\" (UniqueName: \"kubernetes.io/projected/99be21e1-14dc-4541-bc54-9bc290f6ffca-kube-api-access-x462v\") pod \"99be21e1-14dc-4541-bc54-9bc290f6ffca\" (UID: \"99be21e1-14dc-4541-bc54-9bc290f6ffca\") " Dec 02 19:36:22 crc kubenswrapper[4792]: I1202 19:36:22.691200 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/99be21e1-14dc-4541-bc54-9bc290f6ffca-host\") pod \"99be21e1-14dc-4541-bc54-9bc290f6ffca\" (UID: \"99be21e1-14dc-4541-bc54-9bc290f6ffca\") " Dec 02 19:36:22 crc kubenswrapper[4792]: I1202 19:36:22.691337 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/99be21e1-14dc-4541-bc54-9bc290f6ffca-host" (OuterVolumeSpecName: "host") pod "99be21e1-14dc-4541-bc54-9bc290f6ffca" (UID: "99be21e1-14dc-4541-bc54-9bc290f6ffca"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 19:36:22 crc kubenswrapper[4792]: I1202 19:36:22.692000 4792 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/99be21e1-14dc-4541-bc54-9bc290f6ffca-host\") on node \"crc\" DevicePath \"\"" Dec 02 19:36:22 crc kubenswrapper[4792]: I1202 19:36:22.696675 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99be21e1-14dc-4541-bc54-9bc290f6ffca-kube-api-access-x462v" (OuterVolumeSpecName: "kube-api-access-x462v") pod "99be21e1-14dc-4541-bc54-9bc290f6ffca" (UID: "99be21e1-14dc-4541-bc54-9bc290f6ffca"). InnerVolumeSpecName "kube-api-access-x462v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:36:22 crc kubenswrapper[4792]: I1202 19:36:22.794185 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x462v\" (UniqueName: \"kubernetes.io/projected/99be21e1-14dc-4541-bc54-9bc290f6ffca-kube-api-access-x462v\") on node \"crc\" DevicePath \"\"" Dec 02 19:36:23 crc kubenswrapper[4792]: I1202 19:36:23.506141 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d3d778728c40e1d2ec11b65ae6c666236ce3e9646353c39a9116cfa03c78bdbc" Dec 02 19:36:23 crc kubenswrapper[4792]: I1202 19:36:23.506263 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-85rz5/crc-debug-lmb4q" Dec 02 19:36:23 crc kubenswrapper[4792]: I1202 19:36:23.550120 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99be21e1-14dc-4541-bc54-9bc290f6ffca" path="/var/lib/kubelet/pods/99be21e1-14dc-4541-bc54-9bc290f6ffca/volumes" Dec 02 19:36:23 crc kubenswrapper[4792]: I1202 19:36:23.859832 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-85rz5/crc-debug-dg4lr"] Dec 02 19:36:23 crc kubenswrapper[4792]: E1202 19:36:23.860233 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99be21e1-14dc-4541-bc54-9bc290f6ffca" containerName="container-00" Dec 02 19:36:23 crc kubenswrapper[4792]: I1202 19:36:23.860244 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="99be21e1-14dc-4541-bc54-9bc290f6ffca" containerName="container-00" Dec 02 19:36:23 crc kubenswrapper[4792]: I1202 19:36:23.860473 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="99be21e1-14dc-4541-bc54-9bc290f6ffca" containerName="container-00" Dec 02 19:36:23 crc kubenswrapper[4792]: I1202 19:36:23.861418 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-85rz5/crc-debug-dg4lr" Dec 02 19:36:24 crc kubenswrapper[4792]: I1202 19:36:24.016064 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qw6bv\" (UniqueName: \"kubernetes.io/projected/e565c701-5026-48b5-b89d-160098224c85-kube-api-access-qw6bv\") pod \"crc-debug-dg4lr\" (UID: \"e565c701-5026-48b5-b89d-160098224c85\") " pod="openshift-must-gather-85rz5/crc-debug-dg4lr" Dec 02 19:36:24 crc kubenswrapper[4792]: I1202 19:36:24.016213 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e565c701-5026-48b5-b89d-160098224c85-host\") pod \"crc-debug-dg4lr\" (UID: \"e565c701-5026-48b5-b89d-160098224c85\") " pod="openshift-must-gather-85rz5/crc-debug-dg4lr" Dec 02 19:36:24 crc kubenswrapper[4792]: I1202 19:36:24.117665 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qw6bv\" (UniqueName: \"kubernetes.io/projected/e565c701-5026-48b5-b89d-160098224c85-kube-api-access-qw6bv\") pod \"crc-debug-dg4lr\" (UID: \"e565c701-5026-48b5-b89d-160098224c85\") " pod="openshift-must-gather-85rz5/crc-debug-dg4lr" Dec 02 19:36:24 crc kubenswrapper[4792]: I1202 19:36:24.117771 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e565c701-5026-48b5-b89d-160098224c85-host\") pod \"crc-debug-dg4lr\" (UID: \"e565c701-5026-48b5-b89d-160098224c85\") " pod="openshift-must-gather-85rz5/crc-debug-dg4lr" Dec 02 19:36:24 crc kubenswrapper[4792]: I1202 19:36:24.117900 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e565c701-5026-48b5-b89d-160098224c85-host\") pod \"crc-debug-dg4lr\" (UID: \"e565c701-5026-48b5-b89d-160098224c85\") " pod="openshift-must-gather-85rz5/crc-debug-dg4lr" Dec 02 19:36:24 crc kubenswrapper[4792]: I1202 19:36:24.139889 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qw6bv\" (UniqueName: \"kubernetes.io/projected/e565c701-5026-48b5-b89d-160098224c85-kube-api-access-qw6bv\") pod \"crc-debug-dg4lr\" (UID: \"e565c701-5026-48b5-b89d-160098224c85\") " 
pod="openshift-must-gather-85rz5/crc-debug-dg4lr" Dec 02 19:36:24 crc kubenswrapper[4792]: I1202 19:36:24.186019 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-85rz5/crc-debug-dg4lr" Dec 02 19:36:24 crc kubenswrapper[4792]: I1202 19:36:24.517335 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-85rz5/crc-debug-dg4lr" event={"ID":"e565c701-5026-48b5-b89d-160098224c85","Type":"ContainerStarted","Data":"91ebe72289a983b44a34533442d638e5f53392c99323fd55c98c5ff8a40e1431"} Dec 02 19:36:24 crc kubenswrapper[4792]: I1202 19:36:24.517685 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-85rz5/crc-debug-dg4lr" event={"ID":"e565c701-5026-48b5-b89d-160098224c85","Type":"ContainerStarted","Data":"2dc590a8dd8d72f1581f5755a3d3571a614f23a9b22d19ff83bb5505decc9b6b"} Dec 02 19:36:24 crc kubenswrapper[4792]: I1202 19:36:24.558431 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-85rz5/crc-debug-dg4lr" podStartSLOduration=1.558406775 podStartE2EDuration="1.558406775s" podCreationTimestamp="2025-12-02 19:36:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 19:36:24.534148409 +0000 UTC m=+3615.307040747" watchObservedRunningTime="2025-12-02 19:36:24.558406775 +0000 UTC m=+3615.331299123" Dec 02 19:36:25 crc kubenswrapper[4792]: I1202 19:36:25.528247 4792 generic.go:334] "Generic (PLEG): container finished" podID="e565c701-5026-48b5-b89d-160098224c85" containerID="91ebe72289a983b44a34533442d638e5f53392c99323fd55c98c5ff8a40e1431" exitCode=0 Dec 02 19:36:25 crc kubenswrapper[4792]: I1202 19:36:25.528306 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-85rz5/crc-debug-dg4lr" event={"ID":"e565c701-5026-48b5-b89d-160098224c85","Type":"ContainerDied","Data":"91ebe72289a983b44a34533442d638e5f53392c99323fd55c98c5ff8a40e1431"} Dec 02 19:36:26 crc kubenswrapper[4792]: I1202 19:36:26.687165 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-85rz5/crc-debug-dg4lr" Dec 02 19:36:26 crc kubenswrapper[4792]: I1202 19:36:26.744980 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-85rz5/crc-debug-dg4lr"] Dec 02 19:36:26 crc kubenswrapper[4792]: I1202 19:36:26.763656 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-85rz5/crc-debug-dg4lr"] Dec 02 19:36:26 crc kubenswrapper[4792]: I1202 19:36:26.772406 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e565c701-5026-48b5-b89d-160098224c85-host\") pod \"e565c701-5026-48b5-b89d-160098224c85\" (UID: \"e565c701-5026-48b5-b89d-160098224c85\") " Dec 02 19:36:26 crc kubenswrapper[4792]: I1202 19:36:26.772605 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e565c701-5026-48b5-b89d-160098224c85-host" (OuterVolumeSpecName: "host") pod "e565c701-5026-48b5-b89d-160098224c85" (UID: "e565c701-5026-48b5-b89d-160098224c85"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 19:36:26 crc kubenswrapper[4792]: I1202 19:36:26.772708 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qw6bv\" (UniqueName: \"kubernetes.io/projected/e565c701-5026-48b5-b89d-160098224c85-kube-api-access-qw6bv\") pod \"e565c701-5026-48b5-b89d-160098224c85\" (UID: \"e565c701-5026-48b5-b89d-160098224c85\") " Dec 02 19:36:26 crc kubenswrapper[4792]: I1202 19:36:26.773134 4792 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e565c701-5026-48b5-b89d-160098224c85-host\") on node \"crc\" DevicePath \"\"" Dec 02 19:36:26 crc kubenswrapper[4792]: I1202 19:36:26.781820 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e565c701-5026-48b5-b89d-160098224c85-kube-api-access-qw6bv" (OuterVolumeSpecName: "kube-api-access-qw6bv") pod "e565c701-5026-48b5-b89d-160098224c85" (UID: "e565c701-5026-48b5-b89d-160098224c85"). InnerVolumeSpecName "kube-api-access-qw6bv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:36:26 crc kubenswrapper[4792]: I1202 19:36:26.875054 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qw6bv\" (UniqueName: \"kubernetes.io/projected/e565c701-5026-48b5-b89d-160098224c85-kube-api-access-qw6bv\") on node \"crc\" DevicePath \"\"" Dec 02 19:36:27 crc kubenswrapper[4792]: I1202 19:36:27.548133 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-85rz5/crc-debug-dg4lr" Dec 02 19:36:27 crc kubenswrapper[4792]: I1202 19:36:27.549703 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e565c701-5026-48b5-b89d-160098224c85" path="/var/lib/kubelet/pods/e565c701-5026-48b5-b89d-160098224c85/volumes" Dec 02 19:36:27 crc kubenswrapper[4792]: I1202 19:36:27.550285 4792 scope.go:117] "RemoveContainer" containerID="91ebe72289a983b44a34533442d638e5f53392c99323fd55c98c5ff8a40e1431" Dec 02 19:36:27 crc kubenswrapper[4792]: I1202 19:36:27.948455 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-85rz5/crc-debug-f68nj"] Dec 02 19:36:27 crc kubenswrapper[4792]: E1202 19:36:27.949338 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e565c701-5026-48b5-b89d-160098224c85" containerName="container-00" Dec 02 19:36:27 crc kubenswrapper[4792]: I1202 19:36:27.949353 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="e565c701-5026-48b5-b89d-160098224c85" containerName="container-00" Dec 02 19:36:27 crc kubenswrapper[4792]: I1202 19:36:27.949610 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="e565c701-5026-48b5-b89d-160098224c85" containerName="container-00" Dec 02 19:36:27 crc kubenswrapper[4792]: I1202 19:36:27.959981 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-85rz5/crc-debug-f68nj" Dec 02 19:36:28 crc kubenswrapper[4792]: I1202 19:36:28.098864 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2h9x9\" (UniqueName: \"kubernetes.io/projected/1d977170-bce4-4bf7-bc14-7037a294b3f6-kube-api-access-2h9x9\") pod \"crc-debug-f68nj\" (UID: \"1d977170-bce4-4bf7-bc14-7037a294b3f6\") " pod="openshift-must-gather-85rz5/crc-debug-f68nj" Dec 02 19:36:28 crc kubenswrapper[4792]: I1202 19:36:28.099201 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1d977170-bce4-4bf7-bc14-7037a294b3f6-host\") pod \"crc-debug-f68nj\" (UID: \"1d977170-bce4-4bf7-bc14-7037a294b3f6\") " pod="openshift-must-gather-85rz5/crc-debug-f68nj" Dec 02 19:36:28 crc kubenswrapper[4792]: I1202 19:36:28.200772 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1d977170-bce4-4bf7-bc14-7037a294b3f6-host\") pod \"crc-debug-f68nj\" (UID: \"1d977170-bce4-4bf7-bc14-7037a294b3f6\") " pod="openshift-must-gather-85rz5/crc-debug-f68nj" Dec 02 19:36:28 crc kubenswrapper[4792]: I1202 19:36:28.200897 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2h9x9\" (UniqueName: \"kubernetes.io/projected/1d977170-bce4-4bf7-bc14-7037a294b3f6-kube-api-access-2h9x9\") pod \"crc-debug-f68nj\" (UID: \"1d977170-bce4-4bf7-bc14-7037a294b3f6\") " pod="openshift-must-gather-85rz5/crc-debug-f68nj" Dec 02 19:36:28 crc kubenswrapper[4792]: I1202 19:36:28.201027 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1d977170-bce4-4bf7-bc14-7037a294b3f6-host\") pod \"crc-debug-f68nj\" (UID: \"1d977170-bce4-4bf7-bc14-7037a294b3f6\") " pod="openshift-must-gather-85rz5/crc-debug-f68nj" Dec 02 19:36:28 crc kubenswrapper[4792]: I1202 19:36:28.219268 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2h9x9\" (UniqueName: \"kubernetes.io/projected/1d977170-bce4-4bf7-bc14-7037a294b3f6-kube-api-access-2h9x9\") pod \"crc-debug-f68nj\" (UID: \"1d977170-bce4-4bf7-bc14-7037a294b3f6\") " pod="openshift-must-gather-85rz5/crc-debug-f68nj" Dec 02 19:36:28 crc kubenswrapper[4792]: I1202 19:36:28.280638 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-85rz5/crc-debug-f68nj" Dec 02 19:36:28 crc kubenswrapper[4792]: W1202 19:36:28.329631 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1d977170_bce4_4bf7_bc14_7037a294b3f6.slice/crio-c61da9e6f522245e180f8ca04fca6e546c566e766fbcfeda3c74dc67cc25b81e WatchSource:0}: Error finding container c61da9e6f522245e180f8ca04fca6e546c566e766fbcfeda3c74dc67cc25b81e: Status 404 returned error can't find the container with id c61da9e6f522245e180f8ca04fca6e546c566e766fbcfeda3c74dc67cc25b81e Dec 02 19:36:28 crc kubenswrapper[4792]: I1202 19:36:28.558341 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-85rz5/crc-debug-f68nj" event={"ID":"1d977170-bce4-4bf7-bc14-7037a294b3f6","Type":"ContainerStarted","Data":"c61da9e6f522245e180f8ca04fca6e546c566e766fbcfeda3c74dc67cc25b81e"} Dec 02 19:36:28 crc kubenswrapper[4792]: E1202 19:36:28.809628 4792 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1d977170_bce4_4bf7_bc14_7037a294b3f6.slice/crio-conmon-5a4f882f28ad40754bf3d1abe266b55f57d6af96e5e87f0f2c08dbef3e1c1a30.scope\": RecentStats: unable to find data in memory cache]" Dec 02 19:36:29 crc kubenswrapper[4792]: I1202 19:36:29.573259 4792 generic.go:334] "Generic (PLEG): container finished" podID="1d977170-bce4-4bf7-bc14-7037a294b3f6" containerID="5a4f882f28ad40754bf3d1abe266b55f57d6af96e5e87f0f2c08dbef3e1c1a30" exitCode=0 Dec 02 19:36:29 crc kubenswrapper[4792]: I1202 19:36:29.573557 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-85rz5/crc-debug-f68nj" event={"ID":"1d977170-bce4-4bf7-bc14-7037a294b3f6","Type":"ContainerDied","Data":"5a4f882f28ad40754bf3d1abe266b55f57d6af96e5e87f0f2c08dbef3e1c1a30"} Dec 02 19:36:29 crc kubenswrapper[4792]: I1202 19:36:29.623142 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-85rz5/crc-debug-f68nj"] Dec 02 19:36:29 crc kubenswrapper[4792]: I1202 19:36:29.635225 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-85rz5/crc-debug-f68nj"] Dec 02 19:36:30 crc kubenswrapper[4792]: I1202 19:36:30.682804 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-85rz5/crc-debug-f68nj" Dec 02 19:36:30 crc kubenswrapper[4792]: I1202 19:36:30.755113 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1d977170-bce4-4bf7-bc14-7037a294b3f6-host\") pod \"1d977170-bce4-4bf7-bc14-7037a294b3f6\" (UID: \"1d977170-bce4-4bf7-bc14-7037a294b3f6\") " Dec 02 19:36:30 crc kubenswrapper[4792]: I1202 19:36:30.755244 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1d977170-bce4-4bf7-bc14-7037a294b3f6-host" (OuterVolumeSpecName: "host") pod "1d977170-bce4-4bf7-bc14-7037a294b3f6" (UID: "1d977170-bce4-4bf7-bc14-7037a294b3f6"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 19:36:30 crc kubenswrapper[4792]: I1202 19:36:30.755409 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2h9x9\" (UniqueName: \"kubernetes.io/projected/1d977170-bce4-4bf7-bc14-7037a294b3f6-kube-api-access-2h9x9\") pod \"1d977170-bce4-4bf7-bc14-7037a294b3f6\" (UID: \"1d977170-bce4-4bf7-bc14-7037a294b3f6\") " Dec 02 19:36:30 crc kubenswrapper[4792]: I1202 19:36:30.755863 4792 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1d977170-bce4-4bf7-bc14-7037a294b3f6-host\") on node \"crc\" DevicePath \"\"" Dec 02 19:36:30 crc kubenswrapper[4792]: I1202 19:36:30.768074 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d977170-bce4-4bf7-bc14-7037a294b3f6-kube-api-access-2h9x9" (OuterVolumeSpecName: "kube-api-access-2h9x9") pod "1d977170-bce4-4bf7-bc14-7037a294b3f6" (UID: "1d977170-bce4-4bf7-bc14-7037a294b3f6"). InnerVolumeSpecName "kube-api-access-2h9x9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:36:30 crc kubenswrapper[4792]: I1202 19:36:30.857197 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2h9x9\" (UniqueName: \"kubernetes.io/projected/1d977170-bce4-4bf7-bc14-7037a294b3f6-kube-api-access-2h9x9\") on node \"crc\" DevicePath \"\"" Dec 02 19:36:31 crc kubenswrapper[4792]: I1202 19:36:31.550800 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d977170-bce4-4bf7-bc14-7037a294b3f6" path="/var/lib/kubelet/pods/1d977170-bce4-4bf7-bc14-7037a294b3f6/volumes" Dec 02 19:36:31 crc kubenswrapper[4792]: I1202 19:36:31.603957 4792 scope.go:117] "RemoveContainer" containerID="5a4f882f28ad40754bf3d1abe266b55f57d6af96e5e87f0f2c08dbef3e1c1a30" Dec 02 19:36:31 crc kubenswrapper[4792]: I1202 19:36:31.603990 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-85rz5/crc-debug-f68nj" Dec 02 19:36:38 crc kubenswrapper[4792]: I1202 19:36:38.081283 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 19:36:38 crc kubenswrapper[4792]: I1202 19:36:38.082822 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 19:36:57 crc kubenswrapper[4792]: I1202 19:36:57.227052 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_26d86aa8-79aa-4d9b-ac24-155924920219/init-config-reloader/0.log" Dec 02 19:36:57 crc kubenswrapper[4792]: I1202 19:36:57.410658 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_26d86aa8-79aa-4d9b-ac24-155924920219/init-config-reloader/0.log" Dec 02 19:36:57 crc kubenswrapper[4792]: I1202 19:36:57.433347 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_26d86aa8-79aa-4d9b-ac24-155924920219/alertmanager/0.log" Dec 02 19:36:57 crc kubenswrapper[4792]: I1202 19:36:57.441080 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_26d86aa8-79aa-4d9b-ac24-155924920219/config-reloader/0.log" Dec 02 19:36:57 crc kubenswrapper[4792]: I1202 19:36:57.591903 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-75d777bcc8-l485p_260c73f4-d8d2-4178-924a-81703068a4f6/barbican-api/0.log" Dec 02 19:36:57 crc kubenswrapper[4792]: I1202 19:36:57.671466 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-bdff8d974-fdcc5_6e40d936-c5d2-4491-b5c5-9794c4fb73b1/barbican-keystone-listener/0.log" Dec 02 19:36:57 crc kubenswrapper[4792]: I1202 19:36:57.706633 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-75d777bcc8-l485p_260c73f4-d8d2-4178-924a-81703068a4f6/barbican-api-log/0.log" Dec 02 19:36:57 crc kubenswrapper[4792]: I1202 19:36:57.920030 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-bdff8d974-fdcc5_6e40d936-c5d2-4491-b5c5-9794c4fb73b1/barbican-keystone-listener-log/0.log" Dec 02 19:36:57 crc kubenswrapper[4792]: I1202 19:36:57.962184 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6d85ccb45-9bkkd_bbaa6700-f41c-49a4-8593-d0d6ba1a6376/barbican-worker/0.log" Dec 02 19:36:58 crc kubenswrapper[4792]: I1202 19:36:58.022069 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6d85ccb45-9bkkd_bbaa6700-f41c-49a4-8593-d0d6ba1a6376/barbican-worker-log/0.log" Dec 02 19:36:58 crc kubenswrapper[4792]: I1202 19:36:58.181234 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-2bppw_e29a810b-8a51-4d2c-ab9e-61315499b272/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 19:36:58 crc kubenswrapper[4792]: I1202 19:36:58.350488 4792 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ceilometer-0_45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2/ceilometer-central-agent/0.log" Dec 02 19:36:58 crc kubenswrapper[4792]: I1202 19:36:58.507145 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2/proxy-httpd/0.log" Dec 02 19:36:58 crc kubenswrapper[4792]: I1202 19:36:58.536595 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2/sg-core/0.log" Dec 02 19:36:58 crc kubenswrapper[4792]: I1202 19:36:58.558811 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2/ceilometer-notification-agent/0.log" Dec 02 19:36:58 crc kubenswrapper[4792]: I1202 19:36:58.748085 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_2b99d2b7-ae25-4088-8cae-a3f6151e735f/cinder-api/0.log" Dec 02 19:36:58 crc kubenswrapper[4792]: I1202 19:36:58.759947 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_2b99d2b7-ae25-4088-8cae-a3f6151e735f/cinder-api-log/0.log" Dec 02 19:36:58 crc kubenswrapper[4792]: I1202 19:36:58.913027 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_4e9df765-ce65-43eb-bdcc-344fb7f68889/cinder-scheduler/0.log" Dec 02 19:36:58 crc kubenswrapper[4792]: I1202 19:36:58.979991 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_4e9df765-ce65-43eb-bdcc-344fb7f68889/probe/0.log" Dec 02 19:36:59 crc kubenswrapper[4792]: I1202 19:36:59.090864 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-api-0_2cbb2471-1100-45f2-9279-c15ef98e34cf/cloudkitty-api-log/0.log" Dec 02 19:36:59 crc kubenswrapper[4792]: I1202 19:36:59.264560 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-api-0_2cbb2471-1100-45f2-9279-c15ef98e34cf/cloudkitty-api/0.log" Dec 02 19:36:59 crc kubenswrapper[4792]: I1202 19:36:59.309555 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-compactor-0_eab5f6f8-38fe-40ac-8407-4fc5044eba84/loki-compactor/0.log" Dec 02 19:36:59 crc kubenswrapper[4792]: I1202 19:36:59.714083 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-distributor-56cd74f89f-lgcfh_c11c6af2-cd99-41e8-b6cf-b86ab025bbfa/loki-distributor/0.log" Dec 02 19:36:59 crc kubenswrapper[4792]: I1202 19:36:59.742785 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-gateway-76cc998948-wsd5z_2f112377-5fcb-424f-9fa1-f92ab0608d82/gateway/0.log" Dec 02 19:36:59 crc kubenswrapper[4792]: I1202 19:36:59.969579 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-gateway-76cc998948-wz9ls_a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4/gateway/0.log" Dec 02 19:37:00 crc kubenswrapper[4792]: I1202 19:37:00.056094 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-index-gateway-0_92495185-21e2-4db2-9b49-6c2b0267c324/loki-index-gateway/0.log" Dec 02 19:37:00 crc kubenswrapper[4792]: I1202 19:37:00.540916 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-query-frontend-779849886d-n28fq_29663f5c-6fe7-42d5-8d53-c8d900e36a9c/loki-query-frontend/0.log" Dec 02 19:37:00 crc kubenswrapper[4792]: I1202 19:37:00.653096 4792 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack_cloudkitty-lokistack-ingester-0_63ad1bca-0ff4-4694-ab0a-56e8f5366d88/loki-ingester/0.log" Dec 02 19:37:01 crc kubenswrapper[4792]: I1202 19:37:01.057187 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-sgg5b_e3237d97-ad57-4313-8210-fa48b0740a3c/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 19:37:01 crc kubenswrapper[4792]: I1202 19:37:01.279201 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-85f64749dc-lrjg9_c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25/init/0.log" Dec 02 19:37:01 crc kubenswrapper[4792]: I1202 19:37:01.288080 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-z8695_81a6ef07-63e3-4982-896f-c40102622a62/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 19:37:01 crc kubenswrapper[4792]: I1202 19:37:01.339212 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-querier-548665d79b-vwc9f_376a394c-12c9-4fa9-b24a-841a6b05ba0b/loki-querier/0.log" Dec 02 19:37:01 crc kubenswrapper[4792]: I1202 19:37:01.541881 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-85f64749dc-lrjg9_c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25/init/0.log" Dec 02 19:37:01 crc kubenswrapper[4792]: I1202 19:37:01.592905 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-85f64749dc-lrjg9_c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25/dnsmasq-dns/0.log" Dec 02 19:37:01 crc kubenswrapper[4792]: I1202 19:37:01.669192 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-7c8wk_6263b59c-7edd-49eb-aac3-42fd1c5da951/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 19:37:01 crc kubenswrapper[4792]: I1202 19:37:01.877239 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_5d2d8dfe-ec8c-4c51-850a-1e25165a6826/glance-log/0.log" Dec 02 19:37:01 crc kubenswrapper[4792]: I1202 19:37:01.960642 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_5d2d8dfe-ec8c-4c51-850a-1e25165a6826/glance-httpd/0.log" Dec 02 19:37:02 crc kubenswrapper[4792]: I1202 19:37:02.193585 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_c0b6301c-c44b-4f68-b11f-59e05346f689/glance-log/0.log" Dec 02 19:37:02 crc kubenswrapper[4792]: I1202 19:37:02.212827 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_c0b6301c-c44b-4f68-b11f-59e05346f689/glance-httpd/0.log" Dec 02 19:37:02 crc kubenswrapper[4792]: I1202 19:37:02.260630 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx_0d0d81cf-d181-4589-ab97-56eb22868c2f/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 19:37:02 crc kubenswrapper[4792]: I1202 19:37:02.433576 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-s9m9g_927db467-4fe0-45db-bdfa-9f8de3f72259/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 19:37:02 crc kubenswrapper[4792]: I1202 19:37:02.879790 4792 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_keystone-cron-29411701-8bqdq_7ba4a311-3a7b-4575-b183-12bb721e71a1/keystone-cron/0.log" Dec 02 19:37:02 crc kubenswrapper[4792]: I1202 19:37:02.940943 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-proc-0_b3b138aa-fe70-4c2d-9f02-64a8f5a96ae1/cloudkitty-proc/0.log" Dec 02 19:37:03 crc kubenswrapper[4792]: I1202 19:37:03.028889 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-54b9cc4f54-2nnzj_3baa86cf-f5d1-40e8-90cc-227ecfae98cf/keystone-api/0.log" Dec 02 19:37:03 crc kubenswrapper[4792]: I1202 19:37:03.087111 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_e9a3a1a3-54f5-4734-aade-5e64bcad49a4/kube-state-metrics/0.log" Dec 02 19:37:03 crc kubenswrapper[4792]: I1202 19:37:03.229124 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-chd8r_c0a31132-972a-4e92-b005-de8cacadfe2e/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 19:37:03 crc kubenswrapper[4792]: I1202 19:37:03.530441 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5c7cc4d64c-s885f_034594f3-2d13-4657-9426-449348df341f/neutron-httpd/0.log" Dec 02 19:37:03 crc kubenswrapper[4792]: I1202 19:37:03.574505 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5c7cc4d64c-s885f_034594f3-2d13-4657-9426-449348df341f/neutron-api/0.log" Dec 02 19:37:03 crc kubenswrapper[4792]: I1202 19:37:03.578623 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p_bf6f87b0-64cb-4aa6-87b5-f4496dd79953/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 19:37:04 crc kubenswrapper[4792]: I1202 19:37:04.320187 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_6ffde9a1-e11f-4216-890b-7992f6e1b84c/nova-api-log/0.log" Dec 02 19:37:04 crc kubenswrapper[4792]: I1202 19:37:04.354866 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_c6ed774e-25d0-47b5-8dd4-1113a9310d29/nova-cell0-conductor-conductor/0.log" Dec 02 19:37:04 crc kubenswrapper[4792]: I1202 19:37:04.452342 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_a1884f2e-062d-4a08-aff1-59d9316bfff8/nova-cell1-conductor-conductor/0.log" Dec 02 19:37:04 crc kubenswrapper[4792]: I1202 19:37:04.529127 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_6ffde9a1-e11f-4216-890b-7992f6e1b84c/nova-api-api/0.log" Dec 02 19:37:04 crc kubenswrapper[4792]: I1202 19:37:04.621100 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_78a5099a-bc49-427d-b2c5-46adcda0e3e9/nova-cell1-novncproxy-novncproxy/0.log" Dec 02 19:37:04 crc kubenswrapper[4792]: I1202 19:37:04.771309 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-bsnvd_5516b9b1-aeb0-40a1-9eac-5c7799b85132/nova-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 19:37:04 crc kubenswrapper[4792]: I1202 19:37:04.935749 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_f0928111-92a0-4459-896b-507add4ebc25/nova-metadata-log/0.log" Dec 02 19:37:05 crc kubenswrapper[4792]: I1202 19:37:05.189802 4792 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_nova-scheduler-0_15549a23-1a53-41b1-84f5-a7bfda08faed/nova-scheduler-scheduler/0.log" Dec 02 19:37:05 crc kubenswrapper[4792]: I1202 19:37:05.223824 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_91bf1c70-0d0f-49f9-aae7-59865a7abd26/mysql-bootstrap/0.log" Dec 02 19:37:05 crc kubenswrapper[4792]: I1202 19:37:05.513335 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_91bf1c70-0d0f-49f9-aae7-59865a7abd26/mysql-bootstrap/0.log" Dec 02 19:37:05 crc kubenswrapper[4792]: I1202 19:37:05.549168 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_91bf1c70-0d0f-49f9-aae7-59865a7abd26/galera/0.log" Dec 02 19:37:05 crc kubenswrapper[4792]: I1202 19:37:05.881285 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_0c5e3683-f4d1-4f32-8c6d-ecc11415c660/mysql-bootstrap/0.log" Dec 02 19:37:05 crc kubenswrapper[4792]: I1202 19:37:05.949440 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_f0928111-92a0-4459-896b-507add4ebc25/nova-metadata-metadata/0.log" Dec 02 19:37:06 crc kubenswrapper[4792]: I1202 19:37:06.124347 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_0c5e3683-f4d1-4f32-8c6d-ecc11415c660/mysql-bootstrap/0.log" Dec 02 19:37:06 crc kubenswrapper[4792]: I1202 19:37:06.269038 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_5703c717-1bce-4ccc-aff7-16c5fe72e724/openstackclient/0.log" Dec 02 19:37:06 crc kubenswrapper[4792]: I1202 19:37:06.354128 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_0c5e3683-f4d1-4f32-8c6d-ecc11415c660/galera/0.log" Dec 02 19:37:06 crc kubenswrapper[4792]: I1202 19:37:06.470901 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-nvx6l_0a44f4be-0f5d-45dc-9cb0-b4705d150c1a/openstack-network-exporter/0.log" Dec 02 19:37:06 crc kubenswrapper[4792]: I1202 19:37:06.711017 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-gpsrm_d0abc322-ef0d-468b-9d23-4e2acd50b51a/ovsdb-server-init/0.log" Dec 02 19:37:06 crc kubenswrapper[4792]: I1202 19:37:06.867669 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-gpsrm_d0abc322-ef0d-468b-9d23-4e2acd50b51a/ovs-vswitchd/0.log" Dec 02 19:37:06 crc kubenswrapper[4792]: I1202 19:37:06.922700 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-gpsrm_d0abc322-ef0d-468b-9d23-4e2acd50b51a/ovsdb-server/0.log" Dec 02 19:37:06 crc kubenswrapper[4792]: I1202 19:37:06.927011 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-gpsrm_d0abc322-ef0d-468b-9d23-4e2acd50b51a/ovsdb-server-init/0.log" Dec 02 19:37:07 crc kubenswrapper[4792]: I1202 19:37:07.111210 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-s44lp_2a5ad51d-6996-42c0-b156-600ff9dc7782/ovn-controller/0.log" Dec 02 19:37:07 crc kubenswrapper[4792]: I1202 19:37:07.171500 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-fdm4x_c5e968fa-782e-49cc-a729-ebf2f94b2bb3/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 19:37:07 crc kubenswrapper[4792]: I1202 19:37:07.344350 4792 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack_ovn-northd-0_69c2b30c-76db-4d6d-a091-3a86040f34fd/openstack-network-exporter/0.log" Dec 02 19:37:07 crc kubenswrapper[4792]: I1202 19:37:07.385170 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_69c2b30c-76db-4d6d-a091-3a86040f34fd/ovn-northd/0.log" Dec 02 19:37:07 crc kubenswrapper[4792]: I1202 19:37:07.482716 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_db787f15-5115-48a8-9443-93f5da555d2a/openstack-network-exporter/0.log" Dec 02 19:37:07 crc kubenswrapper[4792]: I1202 19:37:07.652308 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_db787f15-5115-48a8-9443-93f5da555d2a/ovsdbserver-nb/0.log" Dec 02 19:37:07 crc kubenswrapper[4792]: I1202 19:37:07.794746 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_6cfe9a05-cb43-47d3-84f8-95642cd098ec/ovsdbserver-sb/0.log" Dec 02 19:37:07 crc kubenswrapper[4792]: I1202 19:37:07.802753 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_6cfe9a05-cb43-47d3-84f8-95642cd098ec/openstack-network-exporter/0.log" Dec 02 19:37:07 crc kubenswrapper[4792]: I1202 19:37:07.968292 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-55bc995c96-sn8lv_a20a6e66-0ccb-41ae-a2ec-904e1dcada7b/placement-api/0.log" Dec 02 19:37:08 crc kubenswrapper[4792]: I1202 19:37:08.056000 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-55bc995c96-sn8lv_a20a6e66-0ccb-41ae-a2ec-904e1dcada7b/placement-log/0.log" Dec 02 19:37:08 crc kubenswrapper[4792]: I1202 19:37:08.082131 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 19:37:08 crc kubenswrapper[4792]: I1202 19:37:08.082192 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 19:37:08 crc kubenswrapper[4792]: I1202 19:37:08.082235 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" Dec 02 19:37:08 crc kubenswrapper[4792]: I1202 19:37:08.083096 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0303b4a8c6955524b52ff6e9f01a7697e4973f95ad9a7c905535eb8cc7c64b49"} pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 19:37:08 crc kubenswrapper[4792]: I1202 19:37:08.083173 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" containerID="cri-o://0303b4a8c6955524b52ff6e9f01a7697e4973f95ad9a7c905535eb8cc7c64b49" gracePeriod=600 Dec 02 19:37:08 crc kubenswrapper[4792]: I1202 19:37:08.089162 4792 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_prometheus-metric-storage-0_5d36d473-c89a-496a-ab27-d65535edb0ec/init-config-reloader/0.log" Dec 02 19:37:08 crc kubenswrapper[4792]: E1202 19:37:08.204109 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:37:08 crc kubenswrapper[4792]: I1202 19:37:08.301259 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_5d36d473-c89a-496a-ab27-d65535edb0ec/init-config-reloader/0.log" Dec 02 19:37:08 crc kubenswrapper[4792]: I1202 19:37:08.383644 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_5d36d473-c89a-496a-ab27-d65535edb0ec/config-reloader/0.log" Dec 02 19:37:08 crc kubenswrapper[4792]: I1202 19:37:08.438627 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_5d36d473-c89a-496a-ab27-d65535edb0ec/thanos-sidecar/0.log" Dec 02 19:37:08 crc kubenswrapper[4792]: I1202 19:37:08.444627 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_5d36d473-c89a-496a-ab27-d65535edb0ec/prometheus/0.log" Dec 02 19:37:08 crc kubenswrapper[4792]: I1202 19:37:08.660232 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_c43c55d9-74e9-4158-a193-ee8ead807ad7/setup-container/0.log" Dec 02 19:37:08 crc kubenswrapper[4792]: I1202 19:37:08.965095 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_c43c55d9-74e9-4158-a193-ee8ead807ad7/setup-container/0.log" Dec 02 19:37:08 crc kubenswrapper[4792]: I1202 19:37:08.992065 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_c43c55d9-74e9-4158-a193-ee8ead807ad7/rabbitmq/0.log" Dec 02 19:37:09 crc kubenswrapper[4792]: I1202 19:37:09.005108 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_197e738b-95d3-4250-b16a-e70331f46ba5/setup-container/0.log" Dec 02 19:37:09 crc kubenswrapper[4792]: I1202 19:37:09.007810 4792 generic.go:334] "Generic (PLEG): container finished" podID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerID="0303b4a8c6955524b52ff6e9f01a7697e4973f95ad9a7c905535eb8cc7c64b49" exitCode=0 Dec 02 19:37:09 crc kubenswrapper[4792]: I1202 19:37:09.007843 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" event={"ID":"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f","Type":"ContainerDied","Data":"0303b4a8c6955524b52ff6e9f01a7697e4973f95ad9a7c905535eb8cc7c64b49"} Dec 02 19:37:09 crc kubenswrapper[4792]: I1202 19:37:09.007875 4792 scope.go:117] "RemoveContainer" containerID="1316731db97579edd6c406f74dd573b0d2f7753051feb84c8996194fbf2c5381" Dec 02 19:37:09 crc kubenswrapper[4792]: I1202 19:37:09.008439 4792 scope.go:117] "RemoveContainer" containerID="0303b4a8c6955524b52ff6e9f01a7697e4973f95ad9a7c905535eb8cc7c64b49" Dec 02 19:37:09 crc kubenswrapper[4792]: E1202 19:37:09.008719 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 
5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:37:09 crc kubenswrapper[4792]: I1202 19:37:09.184673 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_197e738b-95d3-4250-b16a-e70331f46ba5/setup-container/0.log" Dec 02 19:37:09 crc kubenswrapper[4792]: I1202 19:37:09.210398 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_197e738b-95d3-4250-b16a-e70331f46ba5/rabbitmq/0.log" Dec 02 19:37:09 crc kubenswrapper[4792]: I1202 19:37:09.333095 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-pkxk9_5f606874-5cd7-4b0c-b092-c2a2d2e94728/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 19:37:09 crc kubenswrapper[4792]: I1202 19:37:09.392790 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-6cgm9_8133173a-74f1-44d0-ab0d-609e15a2754a/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 19:37:09 crc kubenswrapper[4792]: I1202 19:37:09.595626 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-khrbn_a93efefe-ec0a-45dc-8276-6d99cb2b4db8/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 19:37:09 crc kubenswrapper[4792]: I1202 19:37:09.685824 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-bt7b8_38b7b309-7222-4bf3-b8c1-33b0d01f7c29/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 19:37:09 crc kubenswrapper[4792]: I1202 19:37:09.869847 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-xdsm4_e957a792-604a-439d-8fa3-271edf600cac/ssh-known-hosts-edpm-deployment/0.log" Dec 02 19:37:10 crc kubenswrapper[4792]: I1202 19:37:10.033834 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-79bc665747-kkc2q_e3263958-3718-4ceb-8751-6fa73a1a60f5/proxy-httpd/0.log" Dec 02 19:37:10 crc kubenswrapper[4792]: I1202 19:37:10.304979 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-79bc665747-kkc2q_e3263958-3718-4ceb-8751-6fa73a1a60f5/proxy-server/0.log" Dec 02 19:37:10 crc kubenswrapper[4792]: I1202 19:37:10.396511 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-jd92w_865e7f48-168c-4b42-a6a0-308250071747/swift-ring-rebalance/0.log" Dec 02 19:37:10 crc kubenswrapper[4792]: I1202 19:37:10.530323 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_252fab2e-fcb7-43e8-940a-48adc8f4ebd5/account-auditor/0.log" Dec 02 19:37:10 crc kubenswrapper[4792]: I1202 19:37:10.564686 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_252fab2e-fcb7-43e8-940a-48adc8f4ebd5/account-reaper/0.log" Dec 02 19:37:10 crc kubenswrapper[4792]: I1202 19:37:10.590717 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_252fab2e-fcb7-43e8-940a-48adc8f4ebd5/account-server/0.log" Dec 02 19:37:10 crc kubenswrapper[4792]: I1202 19:37:10.686796 4792 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_252fab2e-fcb7-43e8-940a-48adc8f4ebd5/account-replicator/0.log" Dec 02 19:37:10 crc kubenswrapper[4792]: I1202 19:37:10.739312 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_252fab2e-fcb7-43e8-940a-48adc8f4ebd5/container-auditor/0.log" Dec 02 19:37:10 crc kubenswrapper[4792]: I1202 19:37:10.798055 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_252fab2e-fcb7-43e8-940a-48adc8f4ebd5/container-replicator/0.log" Dec 02 19:37:10 crc kubenswrapper[4792]: I1202 19:37:10.812993 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_252fab2e-fcb7-43e8-940a-48adc8f4ebd5/container-server/0.log" Dec 02 19:37:10 crc kubenswrapper[4792]: I1202 19:37:10.915315 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_252fab2e-fcb7-43e8-940a-48adc8f4ebd5/container-updater/0.log" Dec 02 19:37:10 crc kubenswrapper[4792]: I1202 19:37:10.984672 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_252fab2e-fcb7-43e8-940a-48adc8f4ebd5/object-auditor/0.log" Dec 02 19:37:11 crc kubenswrapper[4792]: I1202 19:37:11.068967 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_252fab2e-fcb7-43e8-940a-48adc8f4ebd5/object-replicator/0.log" Dec 02 19:37:11 crc kubenswrapper[4792]: I1202 19:37:11.071394 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_252fab2e-fcb7-43e8-940a-48adc8f4ebd5/object-expirer/0.log" Dec 02 19:37:11 crc kubenswrapper[4792]: I1202 19:37:11.201307 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_252fab2e-fcb7-43e8-940a-48adc8f4ebd5/object-server/0.log" Dec 02 19:37:11 crc kubenswrapper[4792]: I1202 19:37:11.212701 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_252fab2e-fcb7-43e8-940a-48adc8f4ebd5/object-updater/0.log" Dec 02 19:37:11 crc kubenswrapper[4792]: I1202 19:37:11.313833 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_252fab2e-fcb7-43e8-940a-48adc8f4ebd5/rsync/0.log" Dec 02 19:37:11 crc kubenswrapper[4792]: I1202 19:37:11.319935 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_252fab2e-fcb7-43e8-940a-48adc8f4ebd5/swift-recon-cron/0.log" Dec 02 19:37:11 crc kubenswrapper[4792]: I1202 19:37:11.472093 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4_6aeeaf00-b476-4d91-a807-92fb47391287/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 19:37:11 crc kubenswrapper[4792]: I1202 19:37:11.584071 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_22c610ff-ae47-48ac-8fea-c3ab17f23106/tempest-tests-tempest-tests-runner/0.log" Dec 02 19:37:11 crc kubenswrapper[4792]: I1202 19:37:11.711027 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_9484b035-7d4b-487e-b070-f935fb55389e/test-operator-logs-container/0.log" Dec 02 19:37:11 crc kubenswrapper[4792]: I1202 19:37:11.793218 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-t4l49_c16326ab-5471-4840-98cc-670d5601a873/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 19:37:16 crc 
kubenswrapper[4792]: I1202 19:37:16.866251 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_2951974e-17c4-4cf6-b244-6efc7a6fc742/memcached/0.log" Dec 02 19:37:20 crc kubenswrapper[4792]: I1202 19:37:20.540564 4792 scope.go:117] "RemoveContainer" containerID="0303b4a8c6955524b52ff6e9f01a7697e4973f95ad9a7c905535eb8cc7c64b49" Dec 02 19:37:20 crc kubenswrapper[4792]: E1202 19:37:20.541469 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:37:35 crc kubenswrapper[4792]: I1202 19:37:35.540273 4792 scope.go:117] "RemoveContainer" containerID="0303b4a8c6955524b52ff6e9f01a7697e4973f95ad9a7c905535eb8cc7c64b49" Dec 02 19:37:35 crc kubenswrapper[4792]: E1202 19:37:35.541329 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:37:36 crc kubenswrapper[4792]: I1202 19:37:36.542369 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-7hn6j"] Dec 02 19:37:36 crc kubenswrapper[4792]: E1202 19:37:36.543237 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d977170-bce4-4bf7-bc14-7037a294b3f6" containerName="container-00" Dec 02 19:37:36 crc kubenswrapper[4792]: I1202 19:37:36.543256 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d977170-bce4-4bf7-bc14-7037a294b3f6" containerName="container-00" Dec 02 19:37:36 crc kubenswrapper[4792]: I1202 19:37:36.543545 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d977170-bce4-4bf7-bc14-7037a294b3f6" containerName="container-00" Dec 02 19:37:36 crc kubenswrapper[4792]: I1202 19:37:36.545502 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-7hn6j" Dec 02 19:37:36 crc kubenswrapper[4792]: I1202 19:37:36.556905 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7hn6j"] Dec 02 19:37:36 crc kubenswrapper[4792]: I1202 19:37:36.703482 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/139f3350-fb09-41eb-bfe6-a310d073c68d-utilities\") pod \"redhat-operators-7hn6j\" (UID: \"139f3350-fb09-41eb-bfe6-a310d073c68d\") " pod="openshift-marketplace/redhat-operators-7hn6j" Dec 02 19:37:36 crc kubenswrapper[4792]: I1202 19:37:36.703728 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/139f3350-fb09-41eb-bfe6-a310d073c68d-catalog-content\") pod \"redhat-operators-7hn6j\" (UID: \"139f3350-fb09-41eb-bfe6-a310d073c68d\") " pod="openshift-marketplace/redhat-operators-7hn6j" Dec 02 19:37:36 crc kubenswrapper[4792]: I1202 19:37:36.703805 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-84lgp\" (UniqueName: \"kubernetes.io/projected/139f3350-fb09-41eb-bfe6-a310d073c68d-kube-api-access-84lgp\") pod \"redhat-operators-7hn6j\" (UID: \"139f3350-fb09-41eb-bfe6-a310d073c68d\") " pod="openshift-marketplace/redhat-operators-7hn6j" Dec 02 19:37:36 crc kubenswrapper[4792]: I1202 19:37:36.805743 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/139f3350-fb09-41eb-bfe6-a310d073c68d-catalog-content\") pod \"redhat-operators-7hn6j\" (UID: \"139f3350-fb09-41eb-bfe6-a310d073c68d\") " pod="openshift-marketplace/redhat-operators-7hn6j" Dec 02 19:37:36 crc kubenswrapper[4792]: I1202 19:37:36.805824 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-84lgp\" (UniqueName: \"kubernetes.io/projected/139f3350-fb09-41eb-bfe6-a310d073c68d-kube-api-access-84lgp\") pod \"redhat-operators-7hn6j\" (UID: \"139f3350-fb09-41eb-bfe6-a310d073c68d\") " pod="openshift-marketplace/redhat-operators-7hn6j" Dec 02 19:37:36 crc kubenswrapper[4792]: I1202 19:37:36.805888 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/139f3350-fb09-41eb-bfe6-a310d073c68d-utilities\") pod \"redhat-operators-7hn6j\" (UID: \"139f3350-fb09-41eb-bfe6-a310d073c68d\") " pod="openshift-marketplace/redhat-operators-7hn6j" Dec 02 19:37:36 crc kubenswrapper[4792]: I1202 19:37:36.806328 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/139f3350-fb09-41eb-bfe6-a310d073c68d-catalog-content\") pod \"redhat-operators-7hn6j\" (UID: \"139f3350-fb09-41eb-bfe6-a310d073c68d\") " pod="openshift-marketplace/redhat-operators-7hn6j" Dec 02 19:37:36 crc kubenswrapper[4792]: I1202 19:37:36.806343 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/139f3350-fb09-41eb-bfe6-a310d073c68d-utilities\") pod \"redhat-operators-7hn6j\" (UID: \"139f3350-fb09-41eb-bfe6-a310d073c68d\") " pod="openshift-marketplace/redhat-operators-7hn6j" Dec 02 19:37:36 crc kubenswrapper[4792]: I1202 19:37:36.840345 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-84lgp\" (UniqueName: \"kubernetes.io/projected/139f3350-fb09-41eb-bfe6-a310d073c68d-kube-api-access-84lgp\") pod \"redhat-operators-7hn6j\" (UID: \"139f3350-fb09-41eb-bfe6-a310d073c68d\") " pod="openshift-marketplace/redhat-operators-7hn6j" Dec 02 19:37:36 crc kubenswrapper[4792]: I1202 19:37:36.920122 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7hn6j" Dec 02 19:37:37 crc kubenswrapper[4792]: I1202 19:37:37.392846 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7hn6j"] Dec 02 19:37:38 crc kubenswrapper[4792]: I1202 19:37:38.295112 4792 generic.go:334] "Generic (PLEG): container finished" podID="139f3350-fb09-41eb-bfe6-a310d073c68d" containerID="a00d777aaa5d5fc537279d48a63ce6f65e7a0a751fae3948b9c9bd0d041e45c5" exitCode=0 Dec 02 19:37:38 crc kubenswrapper[4792]: I1202 19:37:38.295202 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7hn6j" event={"ID":"139f3350-fb09-41eb-bfe6-a310d073c68d","Type":"ContainerDied","Data":"a00d777aaa5d5fc537279d48a63ce6f65e7a0a751fae3948b9c9bd0d041e45c5"} Dec 02 19:37:38 crc kubenswrapper[4792]: I1202 19:37:38.295377 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7hn6j" event={"ID":"139f3350-fb09-41eb-bfe6-a310d073c68d","Type":"ContainerStarted","Data":"a8512466b409eedf42d0f750a1695744644511983f87097aa9b077ae264616a4"} Dec 02 19:37:39 crc kubenswrapper[4792]: I1202 19:37:39.489339 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-89kmd_daa6dcd6-39c7-44fc-9754-7de254748ec3/kube-rbac-proxy/0.log" Dec 02 19:37:39 crc kubenswrapper[4792]: I1202 19:37:39.551957 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-89kmd_daa6dcd6-39c7-44fc-9754-7de254748ec3/manager/0.log" Dec 02 19:37:39 crc kubenswrapper[4792]: I1202 19:37:39.679280 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-fmggq_9f1a320f-5255-4fc4-b973-39ce2aee3bae/kube-rbac-proxy/0.log" Dec 02 19:37:39 crc kubenswrapper[4792]: I1202 19:37:39.724086 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-fmggq_9f1a320f-5255-4fc4-b973-39ce2aee3bae/manager/0.log" Dec 02 19:37:39 crc kubenswrapper[4792]: I1202 19:37:39.832563 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-7qgqw_fce25a63-01bd-458a-9567-f08f710abec9/kube-rbac-proxy/0.log" Dec 02 19:37:39 crc kubenswrapper[4792]: I1202 19:37:39.868544 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-7qgqw_fce25a63-01bd-458a-9567-f08f710abec9/manager/0.log" Dec 02 19:37:39 crc kubenswrapper[4792]: I1202 19:37:39.977600 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e9ccccce517e067e9e01e9f87d131b7f866bc945cdc62c92934fa1487f5wsqn_f6b14071-1f38-444e-8b2e-30e7eb904e0f/util/0.log" Dec 02 19:37:40 crc kubenswrapper[4792]: I1202 19:37:40.162356 4792 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_e9ccccce517e067e9e01e9f87d131b7f866bc945cdc62c92934fa1487f5wsqn_f6b14071-1f38-444e-8b2e-30e7eb904e0f/pull/0.log" Dec 02 19:37:40 crc kubenswrapper[4792]: I1202 19:37:40.191699 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e9ccccce517e067e9e01e9f87d131b7f866bc945cdc62c92934fa1487f5wsqn_f6b14071-1f38-444e-8b2e-30e7eb904e0f/util/0.log" Dec 02 19:37:40 crc kubenswrapper[4792]: I1202 19:37:40.193880 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e9ccccce517e067e9e01e9f87d131b7f866bc945cdc62c92934fa1487f5wsqn_f6b14071-1f38-444e-8b2e-30e7eb904e0f/pull/0.log" Dec 02 19:37:40 crc kubenswrapper[4792]: I1202 19:37:40.436960 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e9ccccce517e067e9e01e9f87d131b7f866bc945cdc62c92934fa1487f5wsqn_f6b14071-1f38-444e-8b2e-30e7eb904e0f/pull/0.log" Dec 02 19:37:40 crc kubenswrapper[4792]: I1202 19:37:40.437136 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e9ccccce517e067e9e01e9f87d131b7f866bc945cdc62c92934fa1487f5wsqn_f6b14071-1f38-444e-8b2e-30e7eb904e0f/util/0.log" Dec 02 19:37:40 crc kubenswrapper[4792]: I1202 19:37:40.466810 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e9ccccce517e067e9e01e9f87d131b7f866bc945cdc62c92934fa1487f5wsqn_f6b14071-1f38-444e-8b2e-30e7eb904e0f/extract/0.log" Dec 02 19:37:40 crc kubenswrapper[4792]: I1202 19:37:40.655216 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-zpmp6_851b5fce-f6b9-4fef-a80c-e66336c5fa49/kube-rbac-proxy/0.log" Dec 02 19:37:40 crc kubenswrapper[4792]: I1202 19:37:40.657736 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-zpmp6_851b5fce-f6b9-4fef-a80c-e66336c5fa49/manager/0.log" Dec 02 19:37:40 crc kubenswrapper[4792]: I1202 19:37:40.739854 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-xbg4j_77a52e44-0fcb-4b97-93de-0d26a6901c37/kube-rbac-proxy/0.log" Dec 02 19:37:40 crc kubenswrapper[4792]: I1202 19:37:40.840473 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-xbg4j_77a52e44-0fcb-4b97-93de-0d26a6901c37/manager/0.log" Dec 02 19:37:40 crc kubenswrapper[4792]: I1202 19:37:40.907780 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-ddpv4_a725d6d0-4642-4316-9e67-e002d58f7117/kube-rbac-proxy/0.log" Dec 02 19:37:40 crc kubenswrapper[4792]: I1202 19:37:40.912268 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-ddpv4_a725d6d0-4642-4316-9e67-e002d58f7117/manager/0.log" Dec 02 19:37:41 crc kubenswrapper[4792]: I1202 19:37:41.128112 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-l4tvg_4998553b-ffbc-4684-9756-22885fec1a98/kube-rbac-proxy/0.log" Dec 02 19:37:41 crc kubenswrapper[4792]: I1202 19:37:41.300262 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-l4tvg_4998553b-ffbc-4684-9756-22885fec1a98/manager/0.log" Dec 02 19:37:41 crc kubenswrapper[4792]: I1202 19:37:41.307665 
4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-92mth_e3a43b00-c682-4d04-9996-ceb79a245a18/kube-rbac-proxy/0.log" Dec 02 19:37:41 crc kubenswrapper[4792]: I1202 19:37:41.349970 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7hn6j" event={"ID":"139f3350-fb09-41eb-bfe6-a310d073c68d","Type":"ContainerStarted","Data":"145b52cf85a2fbbdbc4d63497f43d7409418fb8335e1a5c865b61de658020f53"} Dec 02 19:37:41 crc kubenswrapper[4792]: I1202 19:37:41.446045 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-92mth_e3a43b00-c682-4d04-9996-ceb79a245a18/manager/0.log" Dec 02 19:37:41 crc kubenswrapper[4792]: I1202 19:37:41.597473 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-xrbqg_9c2e4c8c-6f7d-41b0-bc43-19e0b14297f9/kube-rbac-proxy/0.log" Dec 02 19:37:41 crc kubenswrapper[4792]: I1202 19:37:41.647193 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-g8nwt_15e497ba-5375-4926-80f5-f46940572f8f/kube-rbac-proxy/0.log" Dec 02 19:37:41 crc kubenswrapper[4792]: I1202 19:37:41.671595 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-xrbqg_9c2e4c8c-6f7d-41b0-bc43-19e0b14297f9/manager/0.log" Dec 02 19:37:41 crc kubenswrapper[4792]: I1202 19:37:41.780985 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-g8nwt_15e497ba-5375-4926-80f5-f46940572f8f/manager/0.log" Dec 02 19:37:41 crc kubenswrapper[4792]: I1202 19:37:41.911428 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-zm7n7_2abae0c8-1cd8-4329-a4dc-678124e1195a/kube-rbac-proxy/0.log" Dec 02 19:37:42 crc kubenswrapper[4792]: I1202 19:37:42.030718 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-zm7n7_2abae0c8-1cd8-4329-a4dc-678124e1195a/manager/0.log" Dec 02 19:37:42 crc kubenswrapper[4792]: I1202 19:37:42.089430 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-b7kc7_ef4ea028-2f42-4560-aad7-94553ba2d3d4/kube-rbac-proxy/0.log" Dec 02 19:37:42 crc kubenswrapper[4792]: I1202 19:37:42.196586 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-ksx7c_588c52cc-05c0-438d-bb0f-80bc1236d8cc/kube-rbac-proxy/0.log" Dec 02 19:37:42 crc kubenswrapper[4792]: I1202 19:37:42.220936 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-b7kc7_ef4ea028-2f42-4560-aad7-94553ba2d3d4/manager/0.log" Dec 02 19:37:42 crc kubenswrapper[4792]: I1202 19:37:42.401740 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-98tg4_dde05ba1-9b55-4f92-9782-d03fed8f26b0/kube-rbac-proxy/0.log" Dec 02 19:37:42 crc kubenswrapper[4792]: I1202 19:37:42.447216 4792 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-98tg4_dde05ba1-9b55-4f92-9782-d03fed8f26b0/manager/0.log" Dec 02 19:37:42 crc kubenswrapper[4792]: I1202 19:37:42.455933 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-ksx7c_588c52cc-05c0-438d-bb0f-80bc1236d8cc/manager/0.log" Dec 02 19:37:42 crc kubenswrapper[4792]: I1202 19:37:42.607483 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4jq2kj_8bd01614-55c6-44bf-b67b-8a6570d9425c/kube-rbac-proxy/0.log" Dec 02 19:37:42 crc kubenswrapper[4792]: I1202 19:37:42.753394 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4jq2kj_8bd01614-55c6-44bf-b67b-8a6570d9425c/manager/0.log" Dec 02 19:37:43 crc kubenswrapper[4792]: I1202 19:37:43.052878 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-gm2bf_15960e0b-8e49-4e4b-b236-5efc49470e11/registry-server/0.log" Dec 02 19:37:43 crc kubenswrapper[4792]: I1202 19:37:43.096764 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-6c49cf65b-lk99g_b729afba-684d-4ecf-a503-fadb0e933192/operator/0.log" Dec 02 19:37:43 crc kubenswrapper[4792]: I1202 19:37:43.340751 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-qrj8t_8becc537-85f1-4b33-8b6a-1ef3bc550cdd/kube-rbac-proxy/0.log" Dec 02 19:37:43 crc kubenswrapper[4792]: I1202 19:37:43.375702 4792 generic.go:334] "Generic (PLEG): container finished" podID="139f3350-fb09-41eb-bfe6-a310d073c68d" containerID="145b52cf85a2fbbdbc4d63497f43d7409418fb8335e1a5c865b61de658020f53" exitCode=0 Dec 02 19:37:43 crc kubenswrapper[4792]: I1202 19:37:43.375760 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7hn6j" event={"ID":"139f3350-fb09-41eb-bfe6-a310d073c68d","Type":"ContainerDied","Data":"145b52cf85a2fbbdbc4d63497f43d7409418fb8335e1a5c865b61de658020f53"} Dec 02 19:37:43 crc kubenswrapper[4792]: I1202 19:37:43.437120 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-qrj8t_8becc537-85f1-4b33-8b6a-1ef3bc550cdd/manager/0.log" Dec 02 19:37:43 crc kubenswrapper[4792]: I1202 19:37:43.446272 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-kqqtk_c305906f-16d5-4e43-9666-299106995d65/kube-rbac-proxy/0.log" Dec 02 19:37:43 crc kubenswrapper[4792]: I1202 19:37:43.600478 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-kqqtk_c305906f-16d5-4e43-9666-299106995d65/manager/0.log" Dec 02 19:37:43 crc kubenswrapper[4792]: I1202 19:37:43.636832 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-xn9ms_feb0adee-ff46-4603-80f1-a086af7e863c/operator/0.log" Dec 02 19:37:43 crc kubenswrapper[4792]: I1202 19:37:43.804294 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-cfpd2_64a3a015-bcba-4079-b30b-47579e9a7513/kube-rbac-proxy/0.log" Dec 02 19:37:43 crc 
kubenswrapper[4792]: I1202 19:37:43.955174 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-cfpd2_64a3a015-bcba-4079-b30b-47579e9a7513/manager/0.log" Dec 02 19:37:43 crc kubenswrapper[4792]: I1202 19:37:43.966804 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-6cc9d48475-tplrw_474ebfec-9504-4baa-a320-af5bd167bf33/kube-rbac-proxy/0.log" Dec 02 19:37:44 crc kubenswrapper[4792]: I1202 19:37:44.149117 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-54d77c4c6-68vgq_f2e8a63e-9ce0-4009-b041-46c7f29daa11/manager/0.log" Dec 02 19:37:44 crc kubenswrapper[4792]: I1202 19:37:44.183343 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-d922f_746154e5-b7a7-4ce9-b0db-4c88c998ccac/manager/0.log" Dec 02 19:37:44 crc kubenswrapper[4792]: I1202 19:37:44.202249 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-d922f_746154e5-b7a7-4ce9-b0db-4c88c998ccac/kube-rbac-proxy/0.log" Dec 02 19:37:44 crc kubenswrapper[4792]: I1202 19:37:44.386657 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7hn6j" event={"ID":"139f3350-fb09-41eb-bfe6-a310d073c68d","Type":"ContainerStarted","Data":"2e4854ebf2c5a5586aa041db2057af38d77df8bb7cfeeb6bc89056e290d43d42"} Dec 02 19:37:44 crc kubenswrapper[4792]: I1202 19:37:44.412064 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-7hn6j" podStartSLOduration=2.7496933930000003 podStartE2EDuration="8.412046209s" podCreationTimestamp="2025-12-02 19:37:36 +0000 UTC" firstStartedPulling="2025-12-02 19:37:38.297582985 +0000 UTC m=+3689.070475313" lastFinishedPulling="2025-12-02 19:37:43.959935811 +0000 UTC m=+3694.732828129" observedRunningTime="2025-12-02 19:37:44.405736264 +0000 UTC m=+3695.178628592" watchObservedRunningTime="2025-12-02 19:37:44.412046209 +0000 UTC m=+3695.184938537" Dec 02 19:37:44 crc kubenswrapper[4792]: I1202 19:37:44.464915 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-6cc9d48475-tplrw_474ebfec-9504-4baa-a320-af5bd167bf33/manager/0.log" Dec 02 19:37:44 crc kubenswrapper[4792]: I1202 19:37:44.490748 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-6v9cs_ad6c4009-148b-4b91-bd36-4d9bd2a16bed/kube-rbac-proxy/0.log" Dec 02 19:37:44 crc kubenswrapper[4792]: I1202 19:37:44.564595 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-6v9cs_ad6c4009-148b-4b91-bd36-4d9bd2a16bed/manager/0.log" Dec 02 19:37:46 crc kubenswrapper[4792]: I1202 19:37:46.921286 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-7hn6j" Dec 02 19:37:46 crc kubenswrapper[4792]: I1202 19:37:46.924035 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-7hn6j" Dec 02 19:37:47 crc kubenswrapper[4792]: I1202 19:37:47.979465 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-7hn6j" 
podUID="139f3350-fb09-41eb-bfe6-a310d073c68d" containerName="registry-server" probeResult="failure" output=< Dec 02 19:37:47 crc kubenswrapper[4792]: timeout: failed to connect service ":50051" within 1s Dec 02 19:37:47 crc kubenswrapper[4792]: > Dec 02 19:37:50 crc kubenswrapper[4792]: I1202 19:37:50.540026 4792 scope.go:117] "RemoveContainer" containerID="0303b4a8c6955524b52ff6e9f01a7697e4973f95ad9a7c905535eb8cc7c64b49" Dec 02 19:37:50 crc kubenswrapper[4792]: E1202 19:37:50.540887 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:37:56 crc kubenswrapper[4792]: I1202 19:37:56.987982 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-7hn6j" Dec 02 19:37:57 crc kubenswrapper[4792]: I1202 19:37:57.051229 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-7hn6j" Dec 02 19:37:57 crc kubenswrapper[4792]: I1202 19:37:57.229185 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7hn6j"] Dec 02 19:37:58 crc kubenswrapper[4792]: I1202 19:37:58.516207 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-7hn6j" podUID="139f3350-fb09-41eb-bfe6-a310d073c68d" containerName="registry-server" containerID="cri-o://2e4854ebf2c5a5586aa041db2057af38d77df8bb7cfeeb6bc89056e290d43d42" gracePeriod=2 Dec 02 19:37:59 crc kubenswrapper[4792]: I1202 19:37:59.229598 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7hn6j" Dec 02 19:37:59 crc kubenswrapper[4792]: I1202 19:37:59.343977 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-84lgp\" (UniqueName: \"kubernetes.io/projected/139f3350-fb09-41eb-bfe6-a310d073c68d-kube-api-access-84lgp\") pod \"139f3350-fb09-41eb-bfe6-a310d073c68d\" (UID: \"139f3350-fb09-41eb-bfe6-a310d073c68d\") " Dec 02 19:37:59 crc kubenswrapper[4792]: I1202 19:37:59.344268 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/139f3350-fb09-41eb-bfe6-a310d073c68d-catalog-content\") pod \"139f3350-fb09-41eb-bfe6-a310d073c68d\" (UID: \"139f3350-fb09-41eb-bfe6-a310d073c68d\") " Dec 02 19:37:59 crc kubenswrapper[4792]: I1202 19:37:59.344298 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/139f3350-fb09-41eb-bfe6-a310d073c68d-utilities\") pod \"139f3350-fb09-41eb-bfe6-a310d073c68d\" (UID: \"139f3350-fb09-41eb-bfe6-a310d073c68d\") " Dec 02 19:37:59 crc kubenswrapper[4792]: I1202 19:37:59.346069 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/139f3350-fb09-41eb-bfe6-a310d073c68d-utilities" (OuterVolumeSpecName: "utilities") pod "139f3350-fb09-41eb-bfe6-a310d073c68d" (UID: "139f3350-fb09-41eb-bfe6-a310d073c68d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:37:59 crc kubenswrapper[4792]: I1202 19:37:59.352463 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/139f3350-fb09-41eb-bfe6-a310d073c68d-kube-api-access-84lgp" (OuterVolumeSpecName: "kube-api-access-84lgp") pod "139f3350-fb09-41eb-bfe6-a310d073c68d" (UID: "139f3350-fb09-41eb-bfe6-a310d073c68d"). InnerVolumeSpecName "kube-api-access-84lgp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:37:59 crc kubenswrapper[4792]: I1202 19:37:59.446883 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-84lgp\" (UniqueName: \"kubernetes.io/projected/139f3350-fb09-41eb-bfe6-a310d073c68d-kube-api-access-84lgp\") on node \"crc\" DevicePath \"\"" Dec 02 19:37:59 crc kubenswrapper[4792]: I1202 19:37:59.446922 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/139f3350-fb09-41eb-bfe6-a310d073c68d-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 19:37:59 crc kubenswrapper[4792]: I1202 19:37:59.452686 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/139f3350-fb09-41eb-bfe6-a310d073c68d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "139f3350-fb09-41eb-bfe6-a310d073c68d" (UID: "139f3350-fb09-41eb-bfe6-a310d073c68d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:37:59 crc kubenswrapper[4792]: I1202 19:37:59.527375 4792 generic.go:334] "Generic (PLEG): container finished" podID="139f3350-fb09-41eb-bfe6-a310d073c68d" containerID="2e4854ebf2c5a5586aa041db2057af38d77df8bb7cfeeb6bc89056e290d43d42" exitCode=0 Dec 02 19:37:59 crc kubenswrapper[4792]: I1202 19:37:59.527427 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-7hn6j" Dec 02 19:37:59 crc kubenswrapper[4792]: I1202 19:37:59.527431 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7hn6j" event={"ID":"139f3350-fb09-41eb-bfe6-a310d073c68d","Type":"ContainerDied","Data":"2e4854ebf2c5a5586aa041db2057af38d77df8bb7cfeeb6bc89056e290d43d42"} Dec 02 19:37:59 crc kubenswrapper[4792]: I1202 19:37:59.527495 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7hn6j" event={"ID":"139f3350-fb09-41eb-bfe6-a310d073c68d","Type":"ContainerDied","Data":"a8512466b409eedf42d0f750a1695744644511983f87097aa9b077ae264616a4"} Dec 02 19:37:59 crc kubenswrapper[4792]: I1202 19:37:59.527515 4792 scope.go:117] "RemoveContainer" containerID="2e4854ebf2c5a5586aa041db2057af38d77df8bb7cfeeb6bc89056e290d43d42" Dec 02 19:37:59 crc kubenswrapper[4792]: I1202 19:37:59.548125 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/139f3350-fb09-41eb-bfe6-a310d073c68d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 19:37:59 crc kubenswrapper[4792]: I1202 19:37:59.554111 4792 scope.go:117] "RemoveContainer" containerID="145b52cf85a2fbbdbc4d63497f43d7409418fb8335e1a5c865b61de658020f53" Dec 02 19:37:59 crc kubenswrapper[4792]: I1202 19:37:59.568598 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7hn6j"] Dec 02 19:37:59 crc kubenswrapper[4792]: I1202 19:37:59.579187 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-7hn6j"] Dec 02 19:37:59 crc kubenswrapper[4792]: I1202 19:37:59.595935 4792 scope.go:117] "RemoveContainer" containerID="a00d777aaa5d5fc537279d48a63ce6f65e7a0a751fae3948b9c9bd0d041e45c5" Dec 02 19:37:59 crc kubenswrapper[4792]: I1202 19:37:59.657461 4792 scope.go:117] "RemoveContainer" containerID="2e4854ebf2c5a5586aa041db2057af38d77df8bb7cfeeb6bc89056e290d43d42" Dec 02 19:37:59 crc kubenswrapper[4792]: E1202 19:37:59.658035 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e4854ebf2c5a5586aa041db2057af38d77df8bb7cfeeb6bc89056e290d43d42\": container with ID starting with 2e4854ebf2c5a5586aa041db2057af38d77df8bb7cfeeb6bc89056e290d43d42 not found: ID does not exist" containerID="2e4854ebf2c5a5586aa041db2057af38d77df8bb7cfeeb6bc89056e290d43d42" Dec 02 19:37:59 crc kubenswrapper[4792]: I1202 19:37:59.658086 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e4854ebf2c5a5586aa041db2057af38d77df8bb7cfeeb6bc89056e290d43d42"} err="failed to get container status \"2e4854ebf2c5a5586aa041db2057af38d77df8bb7cfeeb6bc89056e290d43d42\": rpc error: code = NotFound desc = could not find container \"2e4854ebf2c5a5586aa041db2057af38d77df8bb7cfeeb6bc89056e290d43d42\": container with ID starting with 2e4854ebf2c5a5586aa041db2057af38d77df8bb7cfeeb6bc89056e290d43d42 not found: ID does not exist" Dec 02 19:37:59 crc kubenswrapper[4792]: I1202 19:37:59.658123 4792 scope.go:117] "RemoveContainer" containerID="145b52cf85a2fbbdbc4d63497f43d7409418fb8335e1a5c865b61de658020f53" Dec 02 19:37:59 crc kubenswrapper[4792]: E1202 19:37:59.658618 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"145b52cf85a2fbbdbc4d63497f43d7409418fb8335e1a5c865b61de658020f53\": container with ID 
starting with 145b52cf85a2fbbdbc4d63497f43d7409418fb8335e1a5c865b61de658020f53 not found: ID does not exist" containerID="145b52cf85a2fbbdbc4d63497f43d7409418fb8335e1a5c865b61de658020f53" Dec 02 19:37:59 crc kubenswrapper[4792]: I1202 19:37:59.658660 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"145b52cf85a2fbbdbc4d63497f43d7409418fb8335e1a5c865b61de658020f53"} err="failed to get container status \"145b52cf85a2fbbdbc4d63497f43d7409418fb8335e1a5c865b61de658020f53\": rpc error: code = NotFound desc = could not find container \"145b52cf85a2fbbdbc4d63497f43d7409418fb8335e1a5c865b61de658020f53\": container with ID starting with 145b52cf85a2fbbdbc4d63497f43d7409418fb8335e1a5c865b61de658020f53 not found: ID does not exist" Dec 02 19:37:59 crc kubenswrapper[4792]: I1202 19:37:59.658687 4792 scope.go:117] "RemoveContainer" containerID="a00d777aaa5d5fc537279d48a63ce6f65e7a0a751fae3948b9c9bd0d041e45c5" Dec 02 19:37:59 crc kubenswrapper[4792]: E1202 19:37:59.659024 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a00d777aaa5d5fc537279d48a63ce6f65e7a0a751fae3948b9c9bd0d041e45c5\": container with ID starting with a00d777aaa5d5fc537279d48a63ce6f65e7a0a751fae3948b9c9bd0d041e45c5 not found: ID does not exist" containerID="a00d777aaa5d5fc537279d48a63ce6f65e7a0a751fae3948b9c9bd0d041e45c5" Dec 02 19:37:59 crc kubenswrapper[4792]: I1202 19:37:59.659068 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a00d777aaa5d5fc537279d48a63ce6f65e7a0a751fae3948b9c9bd0d041e45c5"} err="failed to get container status \"a00d777aaa5d5fc537279d48a63ce6f65e7a0a751fae3948b9c9bd0d041e45c5\": rpc error: code = NotFound desc = could not find container \"a00d777aaa5d5fc537279d48a63ce6f65e7a0a751fae3948b9c9bd0d041e45c5\": container with ID starting with a00d777aaa5d5fc537279d48a63ce6f65e7a0a751fae3948b9c9bd0d041e45c5 not found: ID does not exist" Dec 02 19:38:01 crc kubenswrapper[4792]: I1202 19:38:01.553377 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="139f3350-fb09-41eb-bfe6-a310d073c68d" path="/var/lib/kubelet/pods/139f3350-fb09-41eb-bfe6-a310d073c68d/volumes" Dec 02 19:38:02 crc kubenswrapper[4792]: I1202 19:38:02.560222 4792 scope.go:117] "RemoveContainer" containerID="0303b4a8c6955524b52ff6e9f01a7697e4973f95ad9a7c905535eb8cc7c64b49" Dec 02 19:38:02 crc kubenswrapper[4792]: E1202 19:38:02.560853 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:38:04 crc kubenswrapper[4792]: I1202 19:38:04.917677 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-phfft_9053d7ed-27f1-470a-8164-6ef32c05ea87/control-plane-machine-set-operator/0.log" Dec 02 19:38:05 crc kubenswrapper[4792]: I1202 19:38:05.078950 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-nnst7_fb8e1e39-1ba7-4d9a-b6ad-e1ba38d6abf1/kube-rbac-proxy/0.log" Dec 02 19:38:05 crc kubenswrapper[4792]: I1202 19:38:05.093831 4792 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-nnst7_fb8e1e39-1ba7-4d9a-b6ad-e1ba38d6abf1/machine-api-operator/0.log" Dec 02 19:38:17 crc kubenswrapper[4792]: I1202 19:38:17.539848 4792 scope.go:117] "RemoveContainer" containerID="0303b4a8c6955524b52ff6e9f01a7697e4973f95ad9a7c905535eb8cc7c64b49" Dec 02 19:38:17 crc kubenswrapper[4792]: E1202 19:38:17.540535 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:38:19 crc kubenswrapper[4792]: I1202 19:38:19.364417 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-j9t7f_55936820-08a3-4569-a1c3-a2c8ff5ce620/cert-manager-controller/0.log" Dec 02 19:38:19 crc kubenswrapper[4792]: I1202 19:38:19.532916 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-qxjx2_99286d04-ee0d-49ca-84f4-4e7dd9fd9e76/cert-manager-cainjector/0.log" Dec 02 19:38:19 crc kubenswrapper[4792]: I1202 19:38:19.573593 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-2lqc8_a125909d-e0cc-4e4d-ad34-361379b74bf4/cert-manager-webhook/0.log" Dec 02 19:38:29 crc kubenswrapper[4792]: I1202 19:38:29.554783 4792 scope.go:117] "RemoveContainer" containerID="0303b4a8c6955524b52ff6e9f01a7697e4973f95ad9a7c905535eb8cc7c64b49" Dec 02 19:38:29 crc kubenswrapper[4792]: E1202 19:38:29.555894 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:38:33 crc kubenswrapper[4792]: I1202 19:38:33.802856 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-gs72k_1609afe3-03e9-4bab-8ea5-444ffe47a8a0/nmstate-console-plugin/0.log" Dec 02 19:38:33 crc kubenswrapper[4792]: I1202 19:38:33.905175 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-cvm76_59543a69-801f-485f-b683-b9328aab396e/nmstate-handler/0.log" Dec 02 19:38:33 crc kubenswrapper[4792]: I1202 19:38:33.968729 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-svbm7_34e1a790-24ea-4564-8453-f525053ec5fa/nmstate-metrics/0.log" Dec 02 19:38:34 crc kubenswrapper[4792]: I1202 19:38:34.036808 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-svbm7_34e1a790-24ea-4564-8453-f525053ec5fa/kube-rbac-proxy/0.log" Dec 02 19:38:34 crc kubenswrapper[4792]: I1202 19:38:34.151957 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-npmdr_9874a853-6f18-456b-9634-c3b923e8113c/nmstate-operator/0.log" Dec 02 19:38:34 crc kubenswrapper[4792]: I1202 19:38:34.220831 4792 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-w46s7_0ef33e67-bcbf-4c68-87d4-5cc1db2e73d0/nmstate-webhook/0.log" Dec 02 19:38:42 crc kubenswrapper[4792]: I1202 19:38:42.539756 4792 scope.go:117] "RemoveContainer" containerID="0303b4a8c6955524b52ff6e9f01a7697e4973f95ad9a7c905535eb8cc7c64b49" Dec 02 19:38:42 crc kubenswrapper[4792]: E1202 19:38:42.540638 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:38:47 crc kubenswrapper[4792]: I1202 19:38:47.075344 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-nczl8"] Dec 02 19:38:47 crc kubenswrapper[4792]: E1202 19:38:47.077259 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="139f3350-fb09-41eb-bfe6-a310d073c68d" containerName="registry-server" Dec 02 19:38:47 crc kubenswrapper[4792]: I1202 19:38:47.077335 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="139f3350-fb09-41eb-bfe6-a310d073c68d" containerName="registry-server" Dec 02 19:38:47 crc kubenswrapper[4792]: E1202 19:38:47.077409 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="139f3350-fb09-41eb-bfe6-a310d073c68d" containerName="extract-content" Dec 02 19:38:47 crc kubenswrapper[4792]: I1202 19:38:47.077464 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="139f3350-fb09-41eb-bfe6-a310d073c68d" containerName="extract-content" Dec 02 19:38:47 crc kubenswrapper[4792]: E1202 19:38:47.077564 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="139f3350-fb09-41eb-bfe6-a310d073c68d" containerName="extract-utilities" Dec 02 19:38:47 crc kubenswrapper[4792]: I1202 19:38:47.077626 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="139f3350-fb09-41eb-bfe6-a310d073c68d" containerName="extract-utilities" Dec 02 19:38:47 crc kubenswrapper[4792]: I1202 19:38:47.077872 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="139f3350-fb09-41eb-bfe6-a310d073c68d" containerName="registry-server" Dec 02 19:38:47 crc kubenswrapper[4792]: I1202 19:38:47.080136 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nczl8" Dec 02 19:38:47 crc kubenswrapper[4792]: I1202 19:38:47.093101 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nczl8"] Dec 02 19:38:47 crc kubenswrapper[4792]: I1202 19:38:47.162393 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fp8bd\" (UniqueName: \"kubernetes.io/projected/741b34f7-7d0f-4d82-a77a-116da78a04d8-kube-api-access-fp8bd\") pod \"certified-operators-nczl8\" (UID: \"741b34f7-7d0f-4d82-a77a-116da78a04d8\") " pod="openshift-marketplace/certified-operators-nczl8" Dec 02 19:38:47 crc kubenswrapper[4792]: I1202 19:38:47.162487 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/741b34f7-7d0f-4d82-a77a-116da78a04d8-catalog-content\") pod \"certified-operators-nczl8\" (UID: \"741b34f7-7d0f-4d82-a77a-116da78a04d8\") " pod="openshift-marketplace/certified-operators-nczl8" Dec 02 19:38:47 crc kubenswrapper[4792]: I1202 19:38:47.162547 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/741b34f7-7d0f-4d82-a77a-116da78a04d8-utilities\") pod \"certified-operators-nczl8\" (UID: \"741b34f7-7d0f-4d82-a77a-116da78a04d8\") " pod="openshift-marketplace/certified-operators-nczl8" Dec 02 19:38:47 crc kubenswrapper[4792]: I1202 19:38:47.264328 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/741b34f7-7d0f-4d82-a77a-116da78a04d8-catalog-content\") pod \"certified-operators-nczl8\" (UID: \"741b34f7-7d0f-4d82-a77a-116da78a04d8\") " pod="openshift-marketplace/certified-operators-nczl8" Dec 02 19:38:47 crc kubenswrapper[4792]: I1202 19:38:47.264838 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/741b34f7-7d0f-4d82-a77a-116da78a04d8-catalog-content\") pod \"certified-operators-nczl8\" (UID: \"741b34f7-7d0f-4d82-a77a-116da78a04d8\") " pod="openshift-marketplace/certified-operators-nczl8" Dec 02 19:38:47 crc kubenswrapper[4792]: I1202 19:38:47.264927 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/741b34f7-7d0f-4d82-a77a-116da78a04d8-utilities\") pod \"certified-operators-nczl8\" (UID: \"741b34f7-7d0f-4d82-a77a-116da78a04d8\") " pod="openshift-marketplace/certified-operators-nczl8" Dec 02 19:38:47 crc kubenswrapper[4792]: I1202 19:38:47.265157 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/741b34f7-7d0f-4d82-a77a-116da78a04d8-utilities\") pod \"certified-operators-nczl8\" (UID: \"741b34f7-7d0f-4d82-a77a-116da78a04d8\") " pod="openshift-marketplace/certified-operators-nczl8" Dec 02 19:38:47 crc kubenswrapper[4792]: I1202 19:38:47.265243 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fp8bd\" (UniqueName: \"kubernetes.io/projected/741b34f7-7d0f-4d82-a77a-116da78a04d8-kube-api-access-fp8bd\") pod \"certified-operators-nczl8\" (UID: \"741b34f7-7d0f-4d82-a77a-116da78a04d8\") " pod="openshift-marketplace/certified-operators-nczl8" Dec 02 19:38:47 crc kubenswrapper[4792]: I1202 19:38:47.285282 4792 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-fp8bd\" (UniqueName: \"kubernetes.io/projected/741b34f7-7d0f-4d82-a77a-116da78a04d8-kube-api-access-fp8bd\") pod \"certified-operators-nczl8\" (UID: \"741b34f7-7d0f-4d82-a77a-116da78a04d8\") " pod="openshift-marketplace/certified-operators-nczl8" Dec 02 19:38:47 crc kubenswrapper[4792]: I1202 19:38:47.396510 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nczl8" Dec 02 19:38:47 crc kubenswrapper[4792]: I1202 19:38:47.908790 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nczl8"] Dec 02 19:38:48 crc kubenswrapper[4792]: I1202 19:38:48.015315 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nczl8" event={"ID":"741b34f7-7d0f-4d82-a77a-116da78a04d8","Type":"ContainerStarted","Data":"b51a828de571315c7949a01d1b001c8e830adf23505aaf6b0fcc6534c7eec6fe"} Dec 02 19:38:48 crc kubenswrapper[4792]: I1202 19:38:48.626436 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-7fd7bc68c8-q7j7s_0ea8c233-1ba5-435d-a7e8-93d9d055fe7b/kube-rbac-proxy/0.log" Dec 02 19:38:48 crc kubenswrapper[4792]: I1202 19:38:48.634296 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-7fd7bc68c8-q7j7s_0ea8c233-1ba5-435d-a7e8-93d9d055fe7b/manager/0.log" Dec 02 19:38:49 crc kubenswrapper[4792]: I1202 19:38:49.029467 4792 generic.go:334] "Generic (PLEG): container finished" podID="741b34f7-7d0f-4d82-a77a-116da78a04d8" containerID="15f03a438443ab207cf484351ae0f45fa2aa85488f6fc302961d1750689e6b4d" exitCode=0 Dec 02 19:38:49 crc kubenswrapper[4792]: I1202 19:38:49.029545 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nczl8" event={"ID":"741b34f7-7d0f-4d82-a77a-116da78a04d8","Type":"ContainerDied","Data":"15f03a438443ab207cf484351ae0f45fa2aa85488f6fc302961d1750689e6b4d"} Dec 02 19:38:51 crc kubenswrapper[4792]: I1202 19:38:51.055803 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nczl8" event={"ID":"741b34f7-7d0f-4d82-a77a-116da78a04d8","Type":"ContainerStarted","Data":"0834667145bf75abd2a10b45317392d0ce630cc7862d366d848a86c6d7ac42d1"} Dec 02 19:38:52 crc kubenswrapper[4792]: I1202 19:38:52.065906 4792 generic.go:334] "Generic (PLEG): container finished" podID="741b34f7-7d0f-4d82-a77a-116da78a04d8" containerID="0834667145bf75abd2a10b45317392d0ce630cc7862d366d848a86c6d7ac42d1" exitCode=0 Dec 02 19:38:52 crc kubenswrapper[4792]: I1202 19:38:52.065945 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nczl8" event={"ID":"741b34f7-7d0f-4d82-a77a-116da78a04d8","Type":"ContainerDied","Data":"0834667145bf75abd2a10b45317392d0ce630cc7862d366d848a86c6d7ac42d1"} Dec 02 19:38:54 crc kubenswrapper[4792]: I1202 19:38:54.088278 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nczl8" event={"ID":"741b34f7-7d0f-4d82-a77a-116da78a04d8","Type":"ContainerStarted","Data":"f9dccf0f2a5b6c7d49ffd0fa68f4db35fb11755d35fd3a2ed7ea1d5de74ec501"} Dec 02 19:38:54 crc kubenswrapper[4792]: I1202 19:38:54.108283 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-nczl8" podStartSLOduration=3.241806608 
podStartE2EDuration="7.108269851s" podCreationTimestamp="2025-12-02 19:38:47 +0000 UTC" firstStartedPulling="2025-12-02 19:38:49.03180239 +0000 UTC m=+3759.804694738" lastFinishedPulling="2025-12-02 19:38:52.898265653 +0000 UTC m=+3763.671157981" observedRunningTime="2025-12-02 19:38:54.105404376 +0000 UTC m=+3764.878296704" watchObservedRunningTime="2025-12-02 19:38:54.108269851 +0000 UTC m=+3764.881162179" Dec 02 19:38:56 crc kubenswrapper[4792]: I1202 19:38:56.540346 4792 scope.go:117] "RemoveContainer" containerID="0303b4a8c6955524b52ff6e9f01a7697e4973f95ad9a7c905535eb8cc7c64b49" Dec 02 19:38:56 crc kubenswrapper[4792]: E1202 19:38:56.540766 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:38:57 crc kubenswrapper[4792]: I1202 19:38:57.397460 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-nczl8" Dec 02 19:38:57 crc kubenswrapper[4792]: I1202 19:38:57.397571 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-nczl8" Dec 02 19:38:57 crc kubenswrapper[4792]: I1202 19:38:57.461497 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-nczl8" Dec 02 19:38:59 crc kubenswrapper[4792]: I1202 19:38:59.508653 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-nczl8" Dec 02 19:38:59 crc kubenswrapper[4792]: I1202 19:38:59.562010 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nczl8"] Dec 02 19:39:00 crc kubenswrapper[4792]: I1202 19:39:00.161980 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-nczl8" podUID="741b34f7-7d0f-4d82-a77a-116da78a04d8" containerName="registry-server" containerID="cri-o://f9dccf0f2a5b6c7d49ffd0fa68f4db35fb11755d35fd3a2ed7ea1d5de74ec501" gracePeriod=2 Dec 02 19:39:00 crc kubenswrapper[4792]: I1202 19:39:00.808427 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nczl8" Dec 02 19:39:00 crc kubenswrapper[4792]: I1202 19:39:00.869387 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/741b34f7-7d0f-4d82-a77a-116da78a04d8-catalog-content\") pod \"741b34f7-7d0f-4d82-a77a-116da78a04d8\" (UID: \"741b34f7-7d0f-4d82-a77a-116da78a04d8\") " Dec 02 19:39:00 crc kubenswrapper[4792]: I1202 19:39:00.869620 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/741b34f7-7d0f-4d82-a77a-116da78a04d8-utilities\") pod \"741b34f7-7d0f-4d82-a77a-116da78a04d8\" (UID: \"741b34f7-7d0f-4d82-a77a-116da78a04d8\") " Dec 02 19:39:00 crc kubenswrapper[4792]: I1202 19:39:00.869659 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fp8bd\" (UniqueName: \"kubernetes.io/projected/741b34f7-7d0f-4d82-a77a-116da78a04d8-kube-api-access-fp8bd\") pod \"741b34f7-7d0f-4d82-a77a-116da78a04d8\" (UID: \"741b34f7-7d0f-4d82-a77a-116da78a04d8\") " Dec 02 19:39:00 crc kubenswrapper[4792]: I1202 19:39:00.871027 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/741b34f7-7d0f-4d82-a77a-116da78a04d8-utilities" (OuterVolumeSpecName: "utilities") pod "741b34f7-7d0f-4d82-a77a-116da78a04d8" (UID: "741b34f7-7d0f-4d82-a77a-116da78a04d8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:39:00 crc kubenswrapper[4792]: I1202 19:39:00.876719 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/741b34f7-7d0f-4d82-a77a-116da78a04d8-kube-api-access-fp8bd" (OuterVolumeSpecName: "kube-api-access-fp8bd") pod "741b34f7-7d0f-4d82-a77a-116da78a04d8" (UID: "741b34f7-7d0f-4d82-a77a-116da78a04d8"). InnerVolumeSpecName "kube-api-access-fp8bd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:39:00 crc kubenswrapper[4792]: I1202 19:39:00.926162 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/741b34f7-7d0f-4d82-a77a-116da78a04d8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "741b34f7-7d0f-4d82-a77a-116da78a04d8" (UID: "741b34f7-7d0f-4d82-a77a-116da78a04d8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:39:00 crc kubenswrapper[4792]: I1202 19:39:00.971776 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/741b34f7-7d0f-4d82-a77a-116da78a04d8-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 19:39:00 crc kubenswrapper[4792]: I1202 19:39:00.971807 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/741b34f7-7d0f-4d82-a77a-116da78a04d8-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 19:39:00 crc kubenswrapper[4792]: I1202 19:39:00.971819 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fp8bd\" (UniqueName: \"kubernetes.io/projected/741b34f7-7d0f-4d82-a77a-116da78a04d8-kube-api-access-fp8bd\") on node \"crc\" DevicePath \"\"" Dec 02 19:39:01 crc kubenswrapper[4792]: I1202 19:39:01.178224 4792 generic.go:334] "Generic (PLEG): container finished" podID="741b34f7-7d0f-4d82-a77a-116da78a04d8" containerID="f9dccf0f2a5b6c7d49ffd0fa68f4db35fb11755d35fd3a2ed7ea1d5de74ec501" exitCode=0 Dec 02 19:39:01 crc kubenswrapper[4792]: I1202 19:39:01.178284 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nczl8" event={"ID":"741b34f7-7d0f-4d82-a77a-116da78a04d8","Type":"ContainerDied","Data":"f9dccf0f2a5b6c7d49ffd0fa68f4db35fb11755d35fd3a2ed7ea1d5de74ec501"} Dec 02 19:39:01 crc kubenswrapper[4792]: I1202 19:39:01.178318 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nczl8" Dec 02 19:39:01 crc kubenswrapper[4792]: I1202 19:39:01.178370 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nczl8" event={"ID":"741b34f7-7d0f-4d82-a77a-116da78a04d8","Type":"ContainerDied","Data":"b51a828de571315c7949a01d1b001c8e830adf23505aaf6b0fcc6534c7eec6fe"} Dec 02 19:39:01 crc kubenswrapper[4792]: I1202 19:39:01.178404 4792 scope.go:117] "RemoveContainer" containerID="f9dccf0f2a5b6c7d49ffd0fa68f4db35fb11755d35fd3a2ed7ea1d5de74ec501" Dec 02 19:39:01 crc kubenswrapper[4792]: I1202 19:39:01.213698 4792 scope.go:117] "RemoveContainer" containerID="0834667145bf75abd2a10b45317392d0ce630cc7862d366d848a86c6d7ac42d1" Dec 02 19:39:01 crc kubenswrapper[4792]: I1202 19:39:01.220116 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nczl8"] Dec 02 19:39:01 crc kubenswrapper[4792]: I1202 19:39:01.242027 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-nczl8"] Dec 02 19:39:01 crc kubenswrapper[4792]: I1202 19:39:01.268139 4792 scope.go:117] "RemoveContainer" containerID="15f03a438443ab207cf484351ae0f45fa2aa85488f6fc302961d1750689e6b4d" Dec 02 19:39:01 crc kubenswrapper[4792]: I1202 19:39:01.306423 4792 scope.go:117] "RemoveContainer" containerID="f9dccf0f2a5b6c7d49ffd0fa68f4db35fb11755d35fd3a2ed7ea1d5de74ec501" Dec 02 19:39:01 crc kubenswrapper[4792]: E1202 19:39:01.307105 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f9dccf0f2a5b6c7d49ffd0fa68f4db35fb11755d35fd3a2ed7ea1d5de74ec501\": container with ID starting with f9dccf0f2a5b6c7d49ffd0fa68f4db35fb11755d35fd3a2ed7ea1d5de74ec501 not found: ID does not exist" containerID="f9dccf0f2a5b6c7d49ffd0fa68f4db35fb11755d35fd3a2ed7ea1d5de74ec501" Dec 02 19:39:01 crc kubenswrapper[4792]: I1202 19:39:01.307213 
4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9dccf0f2a5b6c7d49ffd0fa68f4db35fb11755d35fd3a2ed7ea1d5de74ec501"} err="failed to get container status \"f9dccf0f2a5b6c7d49ffd0fa68f4db35fb11755d35fd3a2ed7ea1d5de74ec501\": rpc error: code = NotFound desc = could not find container \"f9dccf0f2a5b6c7d49ffd0fa68f4db35fb11755d35fd3a2ed7ea1d5de74ec501\": container with ID starting with f9dccf0f2a5b6c7d49ffd0fa68f4db35fb11755d35fd3a2ed7ea1d5de74ec501 not found: ID does not exist" Dec 02 19:39:01 crc kubenswrapper[4792]: I1202 19:39:01.307305 4792 scope.go:117] "RemoveContainer" containerID="0834667145bf75abd2a10b45317392d0ce630cc7862d366d848a86c6d7ac42d1" Dec 02 19:39:01 crc kubenswrapper[4792]: E1202 19:39:01.307609 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0834667145bf75abd2a10b45317392d0ce630cc7862d366d848a86c6d7ac42d1\": container with ID starting with 0834667145bf75abd2a10b45317392d0ce630cc7862d366d848a86c6d7ac42d1 not found: ID does not exist" containerID="0834667145bf75abd2a10b45317392d0ce630cc7862d366d848a86c6d7ac42d1" Dec 02 19:39:01 crc kubenswrapper[4792]: I1202 19:39:01.307632 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0834667145bf75abd2a10b45317392d0ce630cc7862d366d848a86c6d7ac42d1"} err="failed to get container status \"0834667145bf75abd2a10b45317392d0ce630cc7862d366d848a86c6d7ac42d1\": rpc error: code = NotFound desc = could not find container \"0834667145bf75abd2a10b45317392d0ce630cc7862d366d848a86c6d7ac42d1\": container with ID starting with 0834667145bf75abd2a10b45317392d0ce630cc7862d366d848a86c6d7ac42d1 not found: ID does not exist" Dec 02 19:39:01 crc kubenswrapper[4792]: I1202 19:39:01.307645 4792 scope.go:117] "RemoveContainer" containerID="15f03a438443ab207cf484351ae0f45fa2aa85488f6fc302961d1750689e6b4d" Dec 02 19:39:01 crc kubenswrapper[4792]: E1202 19:39:01.308371 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"15f03a438443ab207cf484351ae0f45fa2aa85488f6fc302961d1750689e6b4d\": container with ID starting with 15f03a438443ab207cf484351ae0f45fa2aa85488f6fc302961d1750689e6b4d not found: ID does not exist" containerID="15f03a438443ab207cf484351ae0f45fa2aa85488f6fc302961d1750689e6b4d" Dec 02 19:39:01 crc kubenswrapper[4792]: I1202 19:39:01.308397 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15f03a438443ab207cf484351ae0f45fa2aa85488f6fc302961d1750689e6b4d"} err="failed to get container status \"15f03a438443ab207cf484351ae0f45fa2aa85488f6fc302961d1750689e6b4d\": rpc error: code = NotFound desc = could not find container \"15f03a438443ab207cf484351ae0f45fa2aa85488f6fc302961d1750689e6b4d\": container with ID starting with 15f03a438443ab207cf484351ae0f45fa2aa85488f6fc302961d1750689e6b4d not found: ID does not exist" Dec 02 19:39:01 crc kubenswrapper[4792]: I1202 19:39:01.551364 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="741b34f7-7d0f-4d82-a77a-116da78a04d8" path="/var/lib/kubelet/pods/741b34f7-7d0f-4d82-a77a-116da78a04d8/volumes" Dec 02 19:39:05 crc kubenswrapper[4792]: I1202 19:39:05.853951 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-pjmcf_cbe95ba3-c2a1-4755-8571-ddaba0aca9d6/kube-rbac-proxy/0.log" Dec 02 19:39:06 crc kubenswrapper[4792]: I1202 19:39:06.067296 4792 
log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-pjmcf_cbe95ba3-c2a1-4755-8571-ddaba0aca9d6/controller/0.log" Dec 02 19:39:06 crc kubenswrapper[4792]: I1202 19:39:06.085978 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-st62r_57c06fe7-dccf-4f91-a9b1-21d345dd688e/cp-frr-files/0.log" Dec 02 19:39:06 crc kubenswrapper[4792]: I1202 19:39:06.312460 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-st62r_57c06fe7-dccf-4f91-a9b1-21d345dd688e/cp-reloader/0.log" Dec 02 19:39:06 crc kubenswrapper[4792]: I1202 19:39:06.312601 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-st62r_57c06fe7-dccf-4f91-a9b1-21d345dd688e/cp-frr-files/0.log" Dec 02 19:39:06 crc kubenswrapper[4792]: I1202 19:39:06.394462 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-st62r_57c06fe7-dccf-4f91-a9b1-21d345dd688e/cp-reloader/0.log" Dec 02 19:39:06 crc kubenswrapper[4792]: I1202 19:39:06.406308 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-st62r_57c06fe7-dccf-4f91-a9b1-21d345dd688e/cp-metrics/0.log" Dec 02 19:39:06 crc kubenswrapper[4792]: I1202 19:39:06.633387 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-st62r_57c06fe7-dccf-4f91-a9b1-21d345dd688e/cp-frr-files/0.log" Dec 02 19:39:06 crc kubenswrapper[4792]: I1202 19:39:06.647171 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-st62r_57c06fe7-dccf-4f91-a9b1-21d345dd688e/cp-reloader/0.log" Dec 02 19:39:06 crc kubenswrapper[4792]: I1202 19:39:06.665658 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-st62r_57c06fe7-dccf-4f91-a9b1-21d345dd688e/cp-metrics/0.log" Dec 02 19:39:06 crc kubenswrapper[4792]: I1202 19:39:06.673196 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-st62r_57c06fe7-dccf-4f91-a9b1-21d345dd688e/cp-metrics/0.log" Dec 02 19:39:06 crc kubenswrapper[4792]: I1202 19:39:06.858587 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-st62r_57c06fe7-dccf-4f91-a9b1-21d345dd688e/cp-reloader/0.log" Dec 02 19:39:06 crc kubenswrapper[4792]: I1202 19:39:06.870496 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-st62r_57c06fe7-dccf-4f91-a9b1-21d345dd688e/cp-frr-files/0.log" Dec 02 19:39:06 crc kubenswrapper[4792]: I1202 19:39:06.900444 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-st62r_57c06fe7-dccf-4f91-a9b1-21d345dd688e/cp-metrics/0.log" Dec 02 19:39:06 crc kubenswrapper[4792]: I1202 19:39:06.925927 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-st62r_57c06fe7-dccf-4f91-a9b1-21d345dd688e/controller/0.log" Dec 02 19:39:07 crc kubenswrapper[4792]: I1202 19:39:07.051009 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-st62r_57c06fe7-dccf-4f91-a9b1-21d345dd688e/kube-rbac-proxy/0.log" Dec 02 19:39:07 crc kubenswrapper[4792]: I1202 19:39:07.059719 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-st62r_57c06fe7-dccf-4f91-a9b1-21d345dd688e/frr-metrics/0.log" Dec 02 19:39:07 crc kubenswrapper[4792]: I1202 19:39:07.105010 4792 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-st62r_57c06fe7-dccf-4f91-a9b1-21d345dd688e/kube-rbac-proxy-frr/0.log" Dec 02 19:39:07 crc kubenswrapper[4792]: I1202 19:39:07.272512 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-st62r_57c06fe7-dccf-4f91-a9b1-21d345dd688e/reloader/0.log" Dec 02 19:39:07 crc kubenswrapper[4792]: I1202 19:39:07.460949 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-cjmwb_4be13a2e-5592-4f44-ad74-31cf277205bf/frr-k8s-webhook-server/0.log" Dec 02 19:39:07 crc kubenswrapper[4792]: I1202 19:39:07.539628 4792 scope.go:117] "RemoveContainer" containerID="0303b4a8c6955524b52ff6e9f01a7697e4973f95ad9a7c905535eb8cc7c64b49" Dec 02 19:39:07 crc kubenswrapper[4792]: E1202 19:39:07.539916 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:39:07 crc kubenswrapper[4792]: I1202 19:39:07.840899 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-7d46f7f9d9-8xxzl_6ba2dc70-5519-41e5-b9fe-57508fd8e395/manager/0.log" Dec 02 19:39:07 crc kubenswrapper[4792]: I1202 19:39:07.978640 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-6974b4d988-96g88_11d281b1-8805-4a7a-b234-53a4e52ae307/webhook-server/0.log" Dec 02 19:39:08 crc kubenswrapper[4792]: I1202 19:39:08.144484 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-st62r_57c06fe7-dccf-4f91-a9b1-21d345dd688e/frr/0.log" Dec 02 19:39:08 crc kubenswrapper[4792]: I1202 19:39:08.161451 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-fmdjd_38a2f9f3-6ce0-4790-8d68-59a8fc723caa/kube-rbac-proxy/0.log" Dec 02 19:39:08 crc kubenswrapper[4792]: I1202 19:39:08.509670 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-fmdjd_38a2f9f3-6ce0-4790-8d68-59a8fc723caa/speaker/0.log" Dec 02 19:39:20 crc kubenswrapper[4792]: I1202 19:39:20.539957 4792 scope.go:117] "RemoveContainer" containerID="0303b4a8c6955524b52ff6e9f01a7697e4973f95ad9a7c905535eb8cc7c64b49" Dec 02 19:39:20 crc kubenswrapper[4792]: E1202 19:39:20.540664 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:39:25 crc kubenswrapper[4792]: I1202 19:39:25.045705 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wvqmw_19402c55-9b6f-4486-a3ce-e6971e5da081/util/0.log" Dec 02 19:39:25 crc kubenswrapper[4792]: I1202 19:39:25.218408 4792 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wvqmw_19402c55-9b6f-4486-a3ce-e6971e5da081/util/0.log" Dec 02 19:39:25 crc kubenswrapper[4792]: I1202 19:39:25.339888 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wvqmw_19402c55-9b6f-4486-a3ce-e6971e5da081/pull/0.log" Dec 02 19:39:25 crc kubenswrapper[4792]: I1202 19:39:25.368625 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wvqmw_19402c55-9b6f-4486-a3ce-e6971e5da081/pull/0.log" Dec 02 19:39:25 crc kubenswrapper[4792]: I1202 19:39:25.697827 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wvqmw_19402c55-9b6f-4486-a3ce-e6971e5da081/util/0.log" Dec 02 19:39:25 crc kubenswrapper[4792]: I1202 19:39:25.864154 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wvqmw_19402c55-9b6f-4486-a3ce-e6971e5da081/pull/0.log" Dec 02 19:39:25 crc kubenswrapper[4792]: I1202 19:39:25.882385 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wvqmw_19402c55-9b6f-4486-a3ce-e6971e5da081/extract/0.log" Dec 02 19:39:25 crc kubenswrapper[4792]: I1202 19:39:25.900179 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303hbhzx_22b8caed-5376-403f-bb65-e3a12cf9c7af/util/0.log" Dec 02 19:39:26 crc kubenswrapper[4792]: I1202 19:39:26.100147 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303hbhzx_22b8caed-5376-403f-bb65-e3a12cf9c7af/util/0.log" Dec 02 19:39:26 crc kubenswrapper[4792]: I1202 19:39:26.105424 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303hbhzx_22b8caed-5376-403f-bb65-e3a12cf9c7af/pull/0.log" Dec 02 19:39:26 crc kubenswrapper[4792]: I1202 19:39:26.116385 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303hbhzx_22b8caed-5376-403f-bb65-e3a12cf9c7af/pull/0.log" Dec 02 19:39:26 crc kubenswrapper[4792]: I1202 19:39:26.301888 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303hbhzx_22b8caed-5376-403f-bb65-e3a12cf9c7af/util/0.log" Dec 02 19:39:26 crc kubenswrapper[4792]: I1202 19:39:26.306317 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303hbhzx_22b8caed-5376-403f-bb65-e3a12cf9c7af/pull/0.log" Dec 02 19:39:26 crc kubenswrapper[4792]: I1202 19:39:26.323868 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303hbhzx_22b8caed-5376-403f-bb65-e3a12cf9c7af/extract/0.log" Dec 02 19:39:26 crc kubenswrapper[4792]: I1202 19:39:26.467834 4792 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f5p9hp_63ea8a4a-8b04-44f4-a0a8-4767d02973bb/util/0.log" Dec 02 19:39:26 crc kubenswrapper[4792]: I1202 19:39:26.665610 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f5p9hp_63ea8a4a-8b04-44f4-a0a8-4767d02973bb/util/0.log" Dec 02 19:39:26 crc kubenswrapper[4792]: I1202 19:39:26.744488 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f5p9hp_63ea8a4a-8b04-44f4-a0a8-4767d02973bb/pull/0.log" Dec 02 19:39:26 crc kubenswrapper[4792]: I1202 19:39:26.764686 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f5p9hp_63ea8a4a-8b04-44f4-a0a8-4767d02973bb/pull/0.log" Dec 02 19:39:26 crc kubenswrapper[4792]: I1202 19:39:26.967885 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f5p9hp_63ea8a4a-8b04-44f4-a0a8-4767d02973bb/extract/0.log" Dec 02 19:39:27 crc kubenswrapper[4792]: I1202 19:39:27.007591 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f5p9hp_63ea8a4a-8b04-44f4-a0a8-4767d02973bb/util/0.log" Dec 02 19:39:27 crc kubenswrapper[4792]: I1202 19:39:27.012722 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f5p9hp_63ea8a4a-8b04-44f4-a0a8-4767d02973bb/pull/0.log" Dec 02 19:39:27 crc kubenswrapper[4792]: I1202 19:39:27.127106 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92108kgnb_d557837e-335a-4da0-b7aa-e8d0a516eada/util/0.log" Dec 02 19:39:27 crc kubenswrapper[4792]: I1202 19:39:27.330106 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92108kgnb_d557837e-335a-4da0-b7aa-e8d0a516eada/pull/0.log" Dec 02 19:39:27 crc kubenswrapper[4792]: I1202 19:39:27.333181 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92108kgnb_d557837e-335a-4da0-b7aa-e8d0a516eada/pull/0.log" Dec 02 19:39:27 crc kubenswrapper[4792]: I1202 19:39:27.362015 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92108kgnb_d557837e-335a-4da0-b7aa-e8d0a516eada/util/0.log" Dec 02 19:39:27 crc kubenswrapper[4792]: I1202 19:39:27.535321 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92108kgnb_d557837e-335a-4da0-b7aa-e8d0a516eada/pull/0.log" Dec 02 19:39:27 crc kubenswrapper[4792]: I1202 19:39:27.535910 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92108kgnb_d557837e-335a-4da0-b7aa-e8d0a516eada/extract/0.log" Dec 02 19:39:27 crc kubenswrapper[4792]: I1202 19:39:27.572115 4792 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92108kgnb_d557837e-335a-4da0-b7aa-e8d0a516eada/util/0.log" Dec 02 19:39:27 crc kubenswrapper[4792]: I1202 19:39:27.714386 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832dqg7_5bdd4894-7306-472f-ae3d-2d1a55966015/util/0.log" Dec 02 19:39:27 crc kubenswrapper[4792]: I1202 19:39:27.950757 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832dqg7_5bdd4894-7306-472f-ae3d-2d1a55966015/pull/0.log" Dec 02 19:39:27 crc kubenswrapper[4792]: I1202 19:39:27.963043 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832dqg7_5bdd4894-7306-472f-ae3d-2d1a55966015/util/0.log" Dec 02 19:39:28 crc kubenswrapper[4792]: I1202 19:39:28.039852 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832dqg7_5bdd4894-7306-472f-ae3d-2d1a55966015/pull/0.log" Dec 02 19:39:28 crc kubenswrapper[4792]: I1202 19:39:28.205320 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832dqg7_5bdd4894-7306-472f-ae3d-2d1a55966015/util/0.log" Dec 02 19:39:28 crc kubenswrapper[4792]: I1202 19:39:28.222438 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832dqg7_5bdd4894-7306-472f-ae3d-2d1a55966015/extract/0.log" Dec 02 19:39:28 crc kubenswrapper[4792]: I1202 19:39:28.260352 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832dqg7_5bdd4894-7306-472f-ae3d-2d1a55966015/pull/0.log" Dec 02 19:39:28 crc kubenswrapper[4792]: I1202 19:39:28.412185 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-p7bwt_72d09f2a-dc85-44d7-bd86-7248dcedd68b/extract-utilities/0.log" Dec 02 19:39:28 crc kubenswrapper[4792]: I1202 19:39:28.622077 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-p7bwt_72d09f2a-dc85-44d7-bd86-7248dcedd68b/extract-content/0.log" Dec 02 19:39:28 crc kubenswrapper[4792]: I1202 19:39:28.628753 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-p7bwt_72d09f2a-dc85-44d7-bd86-7248dcedd68b/extract-content/0.log" Dec 02 19:39:28 crc kubenswrapper[4792]: I1202 19:39:28.642484 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-p7bwt_72d09f2a-dc85-44d7-bd86-7248dcedd68b/extract-utilities/0.log" Dec 02 19:39:29 crc kubenswrapper[4792]: I1202 19:39:29.228120 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-p7bwt_72d09f2a-dc85-44d7-bd86-7248dcedd68b/extract-content/0.log" Dec 02 19:39:29 crc kubenswrapper[4792]: I1202 19:39:29.228152 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-ch67n_ace09f1b-8dfe-48ae-93b6-b6a6f32beb8b/extract-utilities/0.log" Dec 02 19:39:29 crc kubenswrapper[4792]: I1202 19:39:29.284036 4792 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_certified-operators-p7bwt_72d09f2a-dc85-44d7-bd86-7248dcedd68b/extract-utilities/0.log" Dec 02 19:39:29 crc kubenswrapper[4792]: I1202 19:39:29.517895 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-ch67n_ace09f1b-8dfe-48ae-93b6-b6a6f32beb8b/extract-content/0.log" Dec 02 19:39:29 crc kubenswrapper[4792]: I1202 19:39:29.537946 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-ch67n_ace09f1b-8dfe-48ae-93b6-b6a6f32beb8b/extract-content/0.log" Dec 02 19:39:29 crc kubenswrapper[4792]: I1202 19:39:29.546647 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-p7bwt_72d09f2a-dc85-44d7-bd86-7248dcedd68b/registry-server/0.log" Dec 02 19:39:29 crc kubenswrapper[4792]: I1202 19:39:29.547186 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-ch67n_ace09f1b-8dfe-48ae-93b6-b6a6f32beb8b/extract-utilities/0.log" Dec 02 19:39:29 crc kubenswrapper[4792]: I1202 19:39:29.747214 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-ch67n_ace09f1b-8dfe-48ae-93b6-b6a6f32beb8b/extract-content/0.log" Dec 02 19:39:29 crc kubenswrapper[4792]: I1202 19:39:29.780010 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-ch67n_ace09f1b-8dfe-48ae-93b6-b6a6f32beb8b/extract-utilities/0.log" Dec 02 19:39:29 crc kubenswrapper[4792]: I1202 19:39:29.797788 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-7r9rs_1ed6fdc1-3384-4b3a-92a3-fe3c5a50c21e/marketplace-operator/0.log" Dec 02 19:39:30 crc kubenswrapper[4792]: I1202 19:39:30.092933 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gtsnz_2f57faa3-e0a7-4d6a-b404-0e0d2fb73461/extract-utilities/0.log" Dec 02 19:39:30 crc kubenswrapper[4792]: I1202 19:39:30.302840 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-ch67n_ace09f1b-8dfe-48ae-93b6-b6a6f32beb8b/registry-server/0.log" Dec 02 19:39:30 crc kubenswrapper[4792]: I1202 19:39:30.350858 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gtsnz_2f57faa3-e0a7-4d6a-b404-0e0d2fb73461/extract-utilities/0.log" Dec 02 19:39:30 crc kubenswrapper[4792]: I1202 19:39:30.379402 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gtsnz_2f57faa3-e0a7-4d6a-b404-0e0d2fb73461/extract-content/0.log" Dec 02 19:39:30 crc kubenswrapper[4792]: I1202 19:39:30.381833 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gtsnz_2f57faa3-e0a7-4d6a-b404-0e0d2fb73461/extract-content/0.log" Dec 02 19:39:30 crc kubenswrapper[4792]: I1202 19:39:30.513050 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gtsnz_2f57faa3-e0a7-4d6a-b404-0e0d2fb73461/extract-utilities/0.log" Dec 02 19:39:30 crc kubenswrapper[4792]: I1202 19:39:30.523662 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gtsnz_2f57faa3-e0a7-4d6a-b404-0e0d2fb73461/extract-content/0.log" Dec 02 19:39:30 crc kubenswrapper[4792]: I1202 19:39:30.574035 4792 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-operators-2wlh2_e56bb4a5-5196-4a99-8d4d-2c8449675e62/extract-utilities/0.log" Dec 02 19:39:30 crc kubenswrapper[4792]: I1202 19:39:30.717896 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gtsnz_2f57faa3-e0a7-4d6a-b404-0e0d2fb73461/registry-server/0.log" Dec 02 19:39:30 crc kubenswrapper[4792]: I1202 19:39:30.728823 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2wlh2_e56bb4a5-5196-4a99-8d4d-2c8449675e62/extract-utilities/0.log" Dec 02 19:39:30 crc kubenswrapper[4792]: I1202 19:39:30.807019 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2wlh2_e56bb4a5-5196-4a99-8d4d-2c8449675e62/extract-content/0.log" Dec 02 19:39:30 crc kubenswrapper[4792]: I1202 19:39:30.838015 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2wlh2_e56bb4a5-5196-4a99-8d4d-2c8449675e62/extract-content/0.log" Dec 02 19:39:30 crc kubenswrapper[4792]: I1202 19:39:30.963384 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2wlh2_e56bb4a5-5196-4a99-8d4d-2c8449675e62/extract-utilities/0.log" Dec 02 19:39:30 crc kubenswrapper[4792]: I1202 19:39:30.994790 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2wlh2_e56bb4a5-5196-4a99-8d4d-2c8449675e62/extract-content/0.log" Dec 02 19:39:31 crc kubenswrapper[4792]: I1202 19:39:31.414562 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2wlh2_e56bb4a5-5196-4a99-8d4d-2c8449675e62/registry-server/0.log" Dec 02 19:39:34 crc kubenswrapper[4792]: I1202 19:39:34.540134 4792 scope.go:117] "RemoveContainer" containerID="0303b4a8c6955524b52ff6e9f01a7697e4973f95ad9a7c905535eb8cc7c64b49" Dec 02 19:39:34 crc kubenswrapper[4792]: E1202 19:39:34.540864 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:39:45 crc kubenswrapper[4792]: I1202 19:39:45.971613 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-668cf9dfbb-5vh6b_9b8eb6d9-8320-401e-8092-5333c1772c4e/prometheus-operator/0.log" Dec 02 19:39:46 crc kubenswrapper[4792]: I1202 19:39:46.155471 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d_d4a8cfbd-83cc-47ee-abe2-f48802bb58e4/prometheus-operator-admission-webhook/0.log" Dec 02 19:39:46 crc kubenswrapper[4792]: I1202 19:39:46.239508 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9_aeed3219-8084-40fd-888a-1e4bc4dd3179/prometheus-operator-admission-webhook/0.log" Dec 02 19:39:46 crc kubenswrapper[4792]: I1202 19:39:46.357887 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-d8bb48f5d-jg8gh_eba5ee28-f55c-4e22-b4f7-22899eb4fdb7/operator/0.log" Dec 02 19:39:46 crc kubenswrapper[4792]: I1202 
19:39:46.445972 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5446b9c989-nqbpz_d953db38-bd34-4d90-9c21-64ed4b3feaaf/perses-operator/0.log" Dec 02 19:39:47 crc kubenswrapper[4792]: I1202 19:39:47.539980 4792 scope.go:117] "RemoveContainer" containerID="0303b4a8c6955524b52ff6e9f01a7697e4973f95ad9a7c905535eb8cc7c64b49" Dec 02 19:39:47 crc kubenswrapper[4792]: E1202 19:39:47.540688 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:40:00 crc kubenswrapper[4792]: I1202 19:40:00.539620 4792 scope.go:117] "RemoveContainer" containerID="0303b4a8c6955524b52ff6e9f01a7697e4973f95ad9a7c905535eb8cc7c64b49" Dec 02 19:40:00 crc kubenswrapper[4792]: E1202 19:40:00.540253 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:40:01 crc kubenswrapper[4792]: I1202 19:40:01.672985 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-7fd7bc68c8-q7j7s_0ea8c233-1ba5-435d-a7e8-93d9d055fe7b/kube-rbac-proxy/0.log" Dec 02 19:40:01 crc kubenswrapper[4792]: I1202 19:40:01.705282 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-7fd7bc68c8-q7j7s_0ea8c233-1ba5-435d-a7e8-93d9d055fe7b/manager/0.log" Dec 02 19:40:13 crc kubenswrapper[4792]: I1202 19:40:13.539316 4792 scope.go:117] "RemoveContainer" containerID="0303b4a8c6955524b52ff6e9f01a7697e4973f95ad9a7c905535eb8cc7c64b49" Dec 02 19:40:13 crc kubenswrapper[4792]: E1202 19:40:13.540094 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:40:26 crc kubenswrapper[4792]: I1202 19:40:26.539743 4792 scope.go:117] "RemoveContainer" containerID="0303b4a8c6955524b52ff6e9f01a7697e4973f95ad9a7c905535eb8cc7c64b49" Dec 02 19:40:26 crc kubenswrapper[4792]: E1202 19:40:26.540395 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:40:41 crc kubenswrapper[4792]: I1202 19:40:41.540688 4792 scope.go:117] "RemoveContainer" 
containerID="0303b4a8c6955524b52ff6e9f01a7697e4973f95ad9a7c905535eb8cc7c64b49" Dec 02 19:40:41 crc kubenswrapper[4792]: E1202 19:40:41.541654 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:40:53 crc kubenswrapper[4792]: I1202 19:40:53.539502 4792 scope.go:117] "RemoveContainer" containerID="0303b4a8c6955524b52ff6e9f01a7697e4973f95ad9a7c905535eb8cc7c64b49" Dec 02 19:40:53 crc kubenswrapper[4792]: E1202 19:40:53.540296 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:41:05 crc kubenswrapper[4792]: I1202 19:41:05.542341 4792 scope.go:117] "RemoveContainer" containerID="0303b4a8c6955524b52ff6e9f01a7697e4973f95ad9a7c905535eb8cc7c64b49" Dec 02 19:41:05 crc kubenswrapper[4792]: E1202 19:41:05.543323 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:41:19 crc kubenswrapper[4792]: I1202 19:41:19.558978 4792 scope.go:117] "RemoveContainer" containerID="0303b4a8c6955524b52ff6e9f01a7697e4973f95ad9a7c905535eb8cc7c64b49" Dec 02 19:41:19 crc kubenswrapper[4792]: E1202 19:41:19.560394 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:41:31 crc kubenswrapper[4792]: I1202 19:41:31.540132 4792 scope.go:117] "RemoveContainer" containerID="0303b4a8c6955524b52ff6e9f01a7697e4973f95ad9a7c905535eb8cc7c64b49" Dec 02 19:41:31 crc kubenswrapper[4792]: E1202 19:41:31.541385 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:41:40 crc kubenswrapper[4792]: I1202 19:41:40.916939 4792 scope.go:117] "RemoveContainer" containerID="622c4d347d7d7aa711ea40da7063dce05b4d9f844778689d7781b9b5253138b4" Dec 02 19:41:43 crc kubenswrapper[4792]: I1202 19:41:43.885108 4792 generic.go:334] "Generic (PLEG): container 
finished" podID="6090e5ac-472b-4d4a-9a37-cfd91d7f1411" containerID="bc48bf6022c8e881d6abccd4f3f772438b851d194b067caad1cb9d089d265b54" exitCode=0 Dec 02 19:41:43 crc kubenswrapper[4792]: I1202 19:41:43.885246 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-85rz5/must-gather-hbkj7" event={"ID":"6090e5ac-472b-4d4a-9a37-cfd91d7f1411","Type":"ContainerDied","Data":"bc48bf6022c8e881d6abccd4f3f772438b851d194b067caad1cb9d089d265b54"} Dec 02 19:41:43 crc kubenswrapper[4792]: I1202 19:41:43.888315 4792 scope.go:117] "RemoveContainer" containerID="bc48bf6022c8e881d6abccd4f3f772438b851d194b067caad1cb9d089d265b54" Dec 02 19:41:44 crc kubenswrapper[4792]: I1202 19:41:44.539275 4792 scope.go:117] "RemoveContainer" containerID="0303b4a8c6955524b52ff6e9f01a7697e4973f95ad9a7c905535eb8cc7c64b49" Dec 02 19:41:44 crc kubenswrapper[4792]: E1202 19:41:44.539733 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:41:44 crc kubenswrapper[4792]: I1202 19:41:44.601234 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-85rz5_must-gather-hbkj7_6090e5ac-472b-4d4a-9a37-cfd91d7f1411/gather/0.log" Dec 02 19:41:53 crc kubenswrapper[4792]: I1202 19:41:53.090800 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-85rz5/must-gather-hbkj7"] Dec 02 19:41:53 crc kubenswrapper[4792]: I1202 19:41:53.091466 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-85rz5/must-gather-hbkj7" podUID="6090e5ac-472b-4d4a-9a37-cfd91d7f1411" containerName="copy" containerID="cri-o://235abfca656e2dfaa69e0da18253973ae054cd541fb3ad48526e757542eb98f2" gracePeriod=2 Dec 02 19:41:53 crc kubenswrapper[4792]: I1202 19:41:53.108489 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-85rz5/must-gather-hbkj7"] Dec 02 19:41:53 crc kubenswrapper[4792]: I1202 19:41:53.757984 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-85rz5_must-gather-hbkj7_6090e5ac-472b-4d4a-9a37-cfd91d7f1411/copy/0.log" Dec 02 19:41:53 crc kubenswrapper[4792]: I1202 19:41:53.758588 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-85rz5/must-gather-hbkj7" Dec 02 19:41:53 crc kubenswrapper[4792]: I1202 19:41:53.837301 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k86mb\" (UniqueName: \"kubernetes.io/projected/6090e5ac-472b-4d4a-9a37-cfd91d7f1411-kube-api-access-k86mb\") pod \"6090e5ac-472b-4d4a-9a37-cfd91d7f1411\" (UID: \"6090e5ac-472b-4d4a-9a37-cfd91d7f1411\") " Dec 02 19:41:53 crc kubenswrapper[4792]: I1202 19:41:53.837347 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/6090e5ac-472b-4d4a-9a37-cfd91d7f1411-must-gather-output\") pod \"6090e5ac-472b-4d4a-9a37-cfd91d7f1411\" (UID: \"6090e5ac-472b-4d4a-9a37-cfd91d7f1411\") " Dec 02 19:41:53 crc kubenswrapper[4792]: I1202 19:41:53.846781 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6090e5ac-472b-4d4a-9a37-cfd91d7f1411-kube-api-access-k86mb" (OuterVolumeSpecName: "kube-api-access-k86mb") pod "6090e5ac-472b-4d4a-9a37-cfd91d7f1411" (UID: "6090e5ac-472b-4d4a-9a37-cfd91d7f1411"). InnerVolumeSpecName "kube-api-access-k86mb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:41:53 crc kubenswrapper[4792]: I1202 19:41:53.940599 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k86mb\" (UniqueName: \"kubernetes.io/projected/6090e5ac-472b-4d4a-9a37-cfd91d7f1411-kube-api-access-k86mb\") on node \"crc\" DevicePath \"\"" Dec 02 19:41:53 crc kubenswrapper[4792]: I1202 19:41:53.988181 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-85rz5_must-gather-hbkj7_6090e5ac-472b-4d4a-9a37-cfd91d7f1411/copy/0.log" Dec 02 19:41:53 crc kubenswrapper[4792]: I1202 19:41:53.989186 4792 generic.go:334] "Generic (PLEG): container finished" podID="6090e5ac-472b-4d4a-9a37-cfd91d7f1411" containerID="235abfca656e2dfaa69e0da18253973ae054cd541fb3ad48526e757542eb98f2" exitCode=143 Dec 02 19:41:53 crc kubenswrapper[4792]: I1202 19:41:53.989241 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-85rz5/must-gather-hbkj7" Dec 02 19:41:53 crc kubenswrapper[4792]: I1202 19:41:53.989276 4792 scope.go:117] "RemoveContainer" containerID="235abfca656e2dfaa69e0da18253973ae054cd541fb3ad48526e757542eb98f2" Dec 02 19:41:54 crc kubenswrapper[4792]: I1202 19:41:54.008700 4792 scope.go:117] "RemoveContainer" containerID="bc48bf6022c8e881d6abccd4f3f772438b851d194b067caad1cb9d089d265b54" Dec 02 19:41:54 crc kubenswrapper[4792]: I1202 19:41:54.046891 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6090e5ac-472b-4d4a-9a37-cfd91d7f1411-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "6090e5ac-472b-4d4a-9a37-cfd91d7f1411" (UID: "6090e5ac-472b-4d4a-9a37-cfd91d7f1411"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:41:54 crc kubenswrapper[4792]: I1202 19:41:54.086650 4792 scope.go:117] "RemoveContainer" containerID="235abfca656e2dfaa69e0da18253973ae054cd541fb3ad48526e757542eb98f2" Dec 02 19:41:54 crc kubenswrapper[4792]: E1202 19:41:54.087399 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"235abfca656e2dfaa69e0da18253973ae054cd541fb3ad48526e757542eb98f2\": container with ID starting with 235abfca656e2dfaa69e0da18253973ae054cd541fb3ad48526e757542eb98f2 not found: ID does not exist" containerID="235abfca656e2dfaa69e0da18253973ae054cd541fb3ad48526e757542eb98f2" Dec 02 19:41:54 crc kubenswrapper[4792]: I1202 19:41:54.087541 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"235abfca656e2dfaa69e0da18253973ae054cd541fb3ad48526e757542eb98f2"} err="failed to get container status \"235abfca656e2dfaa69e0da18253973ae054cd541fb3ad48526e757542eb98f2\": rpc error: code = NotFound desc = could not find container \"235abfca656e2dfaa69e0da18253973ae054cd541fb3ad48526e757542eb98f2\": container with ID starting with 235abfca656e2dfaa69e0da18253973ae054cd541fb3ad48526e757542eb98f2 not found: ID does not exist" Dec 02 19:41:54 crc kubenswrapper[4792]: I1202 19:41:54.087662 4792 scope.go:117] "RemoveContainer" containerID="bc48bf6022c8e881d6abccd4f3f772438b851d194b067caad1cb9d089d265b54" Dec 02 19:41:54 crc kubenswrapper[4792]: E1202 19:41:54.088188 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc48bf6022c8e881d6abccd4f3f772438b851d194b067caad1cb9d089d265b54\": container with ID starting with bc48bf6022c8e881d6abccd4f3f772438b851d194b067caad1cb9d089d265b54 not found: ID does not exist" containerID="bc48bf6022c8e881d6abccd4f3f772438b851d194b067caad1cb9d089d265b54" Dec 02 19:41:54 crc kubenswrapper[4792]: I1202 19:41:54.088210 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc48bf6022c8e881d6abccd4f3f772438b851d194b067caad1cb9d089d265b54"} err="failed to get container status \"bc48bf6022c8e881d6abccd4f3f772438b851d194b067caad1cb9d089d265b54\": rpc error: code = NotFound desc = could not find container \"bc48bf6022c8e881d6abccd4f3f772438b851d194b067caad1cb9d089d265b54\": container with ID starting with bc48bf6022c8e881d6abccd4f3f772438b851d194b067caad1cb9d089d265b54 not found: ID does not exist" Dec 02 19:41:54 crc kubenswrapper[4792]: I1202 19:41:54.146615 4792 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/6090e5ac-472b-4d4a-9a37-cfd91d7f1411-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 02 19:41:55 crc kubenswrapper[4792]: I1202 19:41:55.555508 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6090e5ac-472b-4d4a-9a37-cfd91d7f1411" path="/var/lib/kubelet/pods/6090e5ac-472b-4d4a-9a37-cfd91d7f1411/volumes" Dec 02 19:41:57 crc kubenswrapper[4792]: I1202 19:41:57.539627 4792 scope.go:117] "RemoveContainer" containerID="0303b4a8c6955524b52ff6e9f01a7697e4973f95ad9a7c905535eb8cc7c64b49" Dec 02 19:41:57 crc kubenswrapper[4792]: E1202 19:41:57.540139 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
Dec 02 19:42:12 crc kubenswrapper[4792]: I1202 19:42:12.540183 4792 scope.go:117] "RemoveContainer" containerID="0303b4a8c6955524b52ff6e9f01a7697e4973f95ad9a7c905535eb8cc7c64b49"
Dec 02 19:42:13 crc kubenswrapper[4792]: I1202 19:42:13.232933 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" event={"ID":"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f","Type":"ContainerStarted","Data":"91980fd36fc859ce6a68107c8e14bd4f51882c72f43243d77f08d00f806eea0a"}
Dec 02 19:42:53 crc kubenswrapper[4792]: I1202 19:42:53.887973 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-8zjjz"]
Dec 02 19:42:53 crc kubenswrapper[4792]: E1202 19:42:53.889264 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6090e5ac-472b-4d4a-9a37-cfd91d7f1411" containerName="gather"
Dec 02 19:42:53 crc kubenswrapper[4792]: I1202 19:42:53.889413 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="6090e5ac-472b-4d4a-9a37-cfd91d7f1411" containerName="gather"
Dec 02 19:42:53 crc kubenswrapper[4792]: E1202 19:42:53.889439 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6090e5ac-472b-4d4a-9a37-cfd91d7f1411" containerName="copy"
Dec 02 19:42:53 crc kubenswrapper[4792]: I1202 19:42:53.889450 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="6090e5ac-472b-4d4a-9a37-cfd91d7f1411" containerName="copy"
Dec 02 19:42:53 crc kubenswrapper[4792]: E1202 19:42:53.889506 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="741b34f7-7d0f-4d82-a77a-116da78a04d8" containerName="registry-server"
Dec 02 19:42:53 crc kubenswrapper[4792]: I1202 19:42:53.889538 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="741b34f7-7d0f-4d82-a77a-116da78a04d8" containerName="registry-server"
Dec 02 19:42:53 crc kubenswrapper[4792]: E1202 19:42:53.889563 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="741b34f7-7d0f-4d82-a77a-116da78a04d8" containerName="extract-content"
Dec 02 19:42:53 crc kubenswrapper[4792]: I1202 19:42:53.889576 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="741b34f7-7d0f-4d82-a77a-116da78a04d8" containerName="extract-content"
Dec 02 19:42:53 crc kubenswrapper[4792]: E1202 19:42:53.889601 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="741b34f7-7d0f-4d82-a77a-116da78a04d8" containerName="extract-utilities"
Dec 02 19:42:53 crc kubenswrapper[4792]: I1202 19:42:53.889613 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="741b34f7-7d0f-4d82-a77a-116da78a04d8" containerName="extract-utilities"
Dec 02 19:42:53 crc kubenswrapper[4792]: I1202 19:42:53.889934 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="6090e5ac-472b-4d4a-9a37-cfd91d7f1411" containerName="copy"
Dec 02 19:42:53 crc kubenswrapper[4792]: I1202 19:42:53.889972 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="741b34f7-7d0f-4d82-a77a-116da78a04d8" containerName="registry-server"
Dec 02 19:42:53 crc kubenswrapper[4792]: I1202 19:42:53.889988 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="6090e5ac-472b-4d4a-9a37-cfd91d7f1411" containerName="gather"
Dec 02 19:42:53 crc kubenswrapper[4792]: I1202 19:42:53.892153 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8zjjz"
Dec 02 19:42:53 crc kubenswrapper[4792]: I1202 19:42:53.897646 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8zjjz"]
Dec 02 19:42:53 crc kubenswrapper[4792]: I1202 19:42:53.960333 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/865c4795-3878-4277-9ee4-76bda2364bbb-utilities\") pod \"community-operators-8zjjz\" (UID: \"865c4795-3878-4277-9ee4-76bda2364bbb\") " pod="openshift-marketplace/community-operators-8zjjz"
Dec 02 19:42:53 crc kubenswrapper[4792]: I1202 19:42:53.960485 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hrf75\" (UniqueName: \"kubernetes.io/projected/865c4795-3878-4277-9ee4-76bda2364bbb-kube-api-access-hrf75\") pod \"community-operators-8zjjz\" (UID: \"865c4795-3878-4277-9ee4-76bda2364bbb\") " pod="openshift-marketplace/community-operators-8zjjz"
Dec 02 19:42:53 crc kubenswrapper[4792]: I1202 19:42:53.960617 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/865c4795-3878-4277-9ee4-76bda2364bbb-catalog-content\") pod \"community-operators-8zjjz\" (UID: \"865c4795-3878-4277-9ee4-76bda2364bbb\") " pod="openshift-marketplace/community-operators-8zjjz"
Dec 02 19:42:54 crc kubenswrapper[4792]: I1202 19:42:54.062872 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hrf75\" (UniqueName: \"kubernetes.io/projected/865c4795-3878-4277-9ee4-76bda2364bbb-kube-api-access-hrf75\") pod \"community-operators-8zjjz\" (UID: \"865c4795-3878-4277-9ee4-76bda2364bbb\") " pod="openshift-marketplace/community-operators-8zjjz"
Dec 02 19:42:54 crc kubenswrapper[4792]: I1202 19:42:54.062958 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/865c4795-3878-4277-9ee4-76bda2364bbb-catalog-content\") pod \"community-operators-8zjjz\" (UID: \"865c4795-3878-4277-9ee4-76bda2364bbb\") " pod="openshift-marketplace/community-operators-8zjjz"
Dec 02 19:42:54 crc kubenswrapper[4792]: I1202 19:42:54.063051 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/865c4795-3878-4277-9ee4-76bda2364bbb-utilities\") pod \"community-operators-8zjjz\" (UID: \"865c4795-3878-4277-9ee4-76bda2364bbb\") " pod="openshift-marketplace/community-operators-8zjjz"
Dec 02 19:42:54 crc kubenswrapper[4792]: I1202 19:42:54.063559 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/865c4795-3878-4277-9ee4-76bda2364bbb-utilities\") pod \"community-operators-8zjjz\" (UID: \"865c4795-3878-4277-9ee4-76bda2364bbb\") " pod="openshift-marketplace/community-operators-8zjjz"
Dec 02 19:42:54 crc kubenswrapper[4792]: I1202 19:42:54.064115 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/865c4795-3878-4277-9ee4-76bda2364bbb-catalog-content\") pod \"community-operators-8zjjz\" (UID: \"865c4795-3878-4277-9ee4-76bda2364bbb\") " pod="openshift-marketplace/community-operators-8zjjz"
Dec 02 19:42:54 crc kubenswrapper[4792]: I1202 19:42:54.082068 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hrf75\" (UniqueName: \"kubernetes.io/projected/865c4795-3878-4277-9ee4-76bda2364bbb-kube-api-access-hrf75\") pod \"community-operators-8zjjz\" (UID: \"865c4795-3878-4277-9ee4-76bda2364bbb\") " pod="openshift-marketplace/community-operators-8zjjz"
Dec 02 19:42:54 crc kubenswrapper[4792]: I1202 19:42:54.223402 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8zjjz"
Dec 02 19:42:54 crc kubenswrapper[4792]: I1202 19:42:54.725450 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8zjjz"]
Dec 02 19:42:55 crc kubenswrapper[4792]: I1202 19:42:55.703257 4792 generic.go:334] "Generic (PLEG): container finished" podID="865c4795-3878-4277-9ee4-76bda2364bbb" containerID="2c312144784553ea36a5c3380aaab3661fc2150dc1e7cf9bd47c3355c8ebbb1e" exitCode=0
Dec 02 19:42:55 crc kubenswrapper[4792]: I1202 19:42:55.703298 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8zjjz" event={"ID":"865c4795-3878-4277-9ee4-76bda2364bbb","Type":"ContainerDied","Data":"2c312144784553ea36a5c3380aaab3661fc2150dc1e7cf9bd47c3355c8ebbb1e"}
Dec 02 19:42:55 crc kubenswrapper[4792]: I1202 19:42:55.703324 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8zjjz" event={"ID":"865c4795-3878-4277-9ee4-76bda2364bbb","Type":"ContainerStarted","Data":"e08ec34c84cbc5944d8eb8a3eb848598a431b9da6ab79e9d58a1eb695abb4938"}
Dec 02 19:42:55 crc kubenswrapper[4792]: I1202 19:42:55.705675 4792 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 02 19:42:56 crc kubenswrapper[4792]: I1202 19:42:56.714347 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8zjjz" event={"ID":"865c4795-3878-4277-9ee4-76bda2364bbb","Type":"ContainerStarted","Data":"3ec5a245714dc9364dc0edff480452edaab63af6776a023508f6fb2f3a0544e1"}
Dec 02 19:42:57 crc kubenswrapper[4792]: I1202 19:42:57.726579 4792 generic.go:334] "Generic (PLEG): container finished" podID="865c4795-3878-4277-9ee4-76bda2364bbb" containerID="3ec5a245714dc9364dc0edff480452edaab63af6776a023508f6fb2f3a0544e1" exitCode=0
Dec 02 19:42:57 crc kubenswrapper[4792]: I1202 19:42:57.726674 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8zjjz" event={"ID":"865c4795-3878-4277-9ee4-76bda2364bbb","Type":"ContainerDied","Data":"3ec5a245714dc9364dc0edff480452edaab63af6776a023508f6fb2f3a0544e1"}
Dec 02 19:42:58 crc kubenswrapper[4792]: I1202 19:42:58.747442 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8zjjz" event={"ID":"865c4795-3878-4277-9ee4-76bda2364bbb","Type":"ContainerStarted","Data":"8806b61253ac95e7b3e665d169b86f76d48c4a2b8a96deaf02b007621decb294"}
Dec 02 19:42:58 crc kubenswrapper[4792]: I1202 19:42:58.774754 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-8zjjz" podStartSLOduration=3.324844576 podStartE2EDuration="5.774730517s" podCreationTimestamp="2025-12-02 19:42:53 +0000 UTC" firstStartedPulling="2025-12-02 19:42:55.705372682 +0000 UTC m=+4006.478265010" lastFinishedPulling="2025-12-02 19:42:58.155258623 +0000 UTC m=+4008.928150951" observedRunningTime="2025-12-02 19:42:58.769434498 +0000 UTC m=+4009.542326856" watchObservedRunningTime="2025-12-02 19:42:58.774730517 +0000 UTC m=+4009.547622865"
Dec 02 19:43:04 crc kubenswrapper[4792]: I1202 19:43:04.224672 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-8zjjz"
Dec 02 19:43:04 crc kubenswrapper[4792]: I1202 19:43:04.225337 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-8zjjz"
Dec 02 19:43:04 crc kubenswrapper[4792]: I1202 19:43:04.280125 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-8zjjz"
Dec 02 19:43:04 crc kubenswrapper[4792]: I1202 19:43:04.910920 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-8zjjz"
Dec 02 19:43:08 crc kubenswrapper[4792]: I1202 19:43:08.272739 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-8zjjz"]
Dec 02 19:43:08 crc kubenswrapper[4792]: I1202 19:43:08.273425 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-8zjjz" podUID="865c4795-3878-4277-9ee4-76bda2364bbb" containerName="registry-server" containerID="cri-o://8806b61253ac95e7b3e665d169b86f76d48c4a2b8a96deaf02b007621decb294" gracePeriod=2
Dec 02 19:43:08 crc kubenswrapper[4792]: I1202 19:43:08.877726 4792 generic.go:334] "Generic (PLEG): container finished" podID="865c4795-3878-4277-9ee4-76bda2364bbb" containerID="8806b61253ac95e7b3e665d169b86f76d48c4a2b8a96deaf02b007621decb294" exitCode=0
Dec 02 19:43:08 crc kubenswrapper[4792]: I1202 19:43:08.878041 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8zjjz" event={"ID":"865c4795-3878-4277-9ee4-76bda2364bbb","Type":"ContainerDied","Data":"8806b61253ac95e7b3e665d169b86f76d48c4a2b8a96deaf02b007621decb294"}
Dec 02 19:43:08 crc kubenswrapper[4792]: I1202 19:43:08.967362 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8zjjz"
Dec 02 19:43:09 crc kubenswrapper[4792]: I1202 19:43:09.121464 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hrf75\" (UniqueName: \"kubernetes.io/projected/865c4795-3878-4277-9ee4-76bda2364bbb-kube-api-access-hrf75\") pod \"865c4795-3878-4277-9ee4-76bda2364bbb\" (UID: \"865c4795-3878-4277-9ee4-76bda2364bbb\") "
Dec 02 19:43:09 crc kubenswrapper[4792]: I1202 19:43:09.121640 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/865c4795-3878-4277-9ee4-76bda2364bbb-catalog-content\") pod \"865c4795-3878-4277-9ee4-76bda2364bbb\" (UID: \"865c4795-3878-4277-9ee4-76bda2364bbb\") "
Dec 02 19:43:09 crc kubenswrapper[4792]: I1202 19:43:09.121671 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/865c4795-3878-4277-9ee4-76bda2364bbb-utilities\") pod \"865c4795-3878-4277-9ee4-76bda2364bbb\" (UID: \"865c4795-3878-4277-9ee4-76bda2364bbb\") "
Dec 02 19:43:09 crc kubenswrapper[4792]: I1202 19:43:09.122702 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/865c4795-3878-4277-9ee4-76bda2364bbb-utilities" (OuterVolumeSpecName: "utilities") pod "865c4795-3878-4277-9ee4-76bda2364bbb" (UID: "865c4795-3878-4277-9ee4-76bda2364bbb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 19:43:09 crc kubenswrapper[4792]: I1202 19:43:09.131582 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/865c4795-3878-4277-9ee4-76bda2364bbb-kube-api-access-hrf75" (OuterVolumeSpecName: "kube-api-access-hrf75") pod "865c4795-3878-4277-9ee4-76bda2364bbb" (UID: "865c4795-3878-4277-9ee4-76bda2364bbb"). InnerVolumeSpecName "kube-api-access-hrf75". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 19:43:09 crc kubenswrapper[4792]: I1202 19:43:09.204938 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/865c4795-3878-4277-9ee4-76bda2364bbb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "865c4795-3878-4277-9ee4-76bda2364bbb" (UID: "865c4795-3878-4277-9ee4-76bda2364bbb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 19:43:09 crc kubenswrapper[4792]: I1202 19:43:09.224280 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/865c4795-3878-4277-9ee4-76bda2364bbb-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 02 19:43:09 crc kubenswrapper[4792]: I1202 19:43:09.224311 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/865c4795-3878-4277-9ee4-76bda2364bbb-utilities\") on node \"crc\" DevicePath \"\""
Dec 02 19:43:09 crc kubenswrapper[4792]: I1202 19:43:09.224321 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hrf75\" (UniqueName: \"kubernetes.io/projected/865c4795-3878-4277-9ee4-76bda2364bbb-kube-api-access-hrf75\") on node \"crc\" DevicePath \"\""
Dec 02 19:43:09 crc kubenswrapper[4792]: I1202 19:43:09.894014 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8zjjz" event={"ID":"865c4795-3878-4277-9ee4-76bda2364bbb","Type":"ContainerDied","Data":"e08ec34c84cbc5944d8eb8a3eb848598a431b9da6ab79e9d58a1eb695abb4938"}
Dec 02 19:43:09 crc kubenswrapper[4792]: I1202 19:43:09.895761 4792 scope.go:117] "RemoveContainer" containerID="8806b61253ac95e7b3e665d169b86f76d48c4a2b8a96deaf02b007621decb294"
Dec 02 19:43:09 crc kubenswrapper[4792]: I1202 19:43:09.894550 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8zjjz"
Dec 02 19:43:09 crc kubenswrapper[4792]: I1202 19:43:09.938544 4792 scope.go:117] "RemoveContainer" containerID="3ec5a245714dc9364dc0edff480452edaab63af6776a023508f6fb2f3a0544e1"
Dec 02 19:43:09 crc kubenswrapper[4792]: I1202 19:43:09.945255 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-8zjjz"]
Dec 02 19:43:09 crc kubenswrapper[4792]: I1202 19:43:09.962399 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-8zjjz"]
Dec 02 19:43:10 crc kubenswrapper[4792]: I1202 19:43:10.167261 4792 scope.go:117] "RemoveContainer" containerID="2c312144784553ea36a5c3380aaab3661fc2150dc1e7cf9bd47c3355c8ebbb1e"
Dec 02 19:43:11 crc kubenswrapper[4792]: I1202 19:43:11.563012 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="865c4795-3878-4277-9ee4-76bda2364bbb" path="/var/lib/kubelet/pods/865c4795-3878-4277-9ee4-76bda2364bbb/volumes"
Dec 02 19:44:38 crc kubenswrapper[4792]: I1202 19:44:38.080801 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 02 19:44:38 crc kubenswrapper[4792]: I1202 19:44:38.081468 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 02 19:44:47 crc kubenswrapper[4792]: I1202 19:44:47.115287 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-kvrfw/must-gather-42htg"]
Dec 02 19:44:47 crc kubenswrapper[4792]: E1202 19:44:47.116193 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="865c4795-3878-4277-9ee4-76bda2364bbb" containerName="extract-content"
container" podUID="865c4795-3878-4277-9ee4-76bda2364bbb" containerName="extract-content" Dec 02 19:44:47 crc kubenswrapper[4792]: I1202 19:44:47.116205 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="865c4795-3878-4277-9ee4-76bda2364bbb" containerName="extract-content" Dec 02 19:44:47 crc kubenswrapper[4792]: E1202 19:44:47.116225 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="865c4795-3878-4277-9ee4-76bda2364bbb" containerName="registry-server" Dec 02 19:44:47 crc kubenswrapper[4792]: I1202 19:44:47.116230 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="865c4795-3878-4277-9ee4-76bda2364bbb" containerName="registry-server" Dec 02 19:44:47 crc kubenswrapper[4792]: E1202 19:44:47.116241 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="865c4795-3878-4277-9ee4-76bda2364bbb" containerName="extract-utilities" Dec 02 19:44:47 crc kubenswrapper[4792]: I1202 19:44:47.116247 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="865c4795-3878-4277-9ee4-76bda2364bbb" containerName="extract-utilities" Dec 02 19:44:47 crc kubenswrapper[4792]: I1202 19:44:47.116451 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="865c4795-3878-4277-9ee4-76bda2364bbb" containerName="registry-server" Dec 02 19:44:47 crc kubenswrapper[4792]: I1202 19:44:47.117630 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-kvrfw/must-gather-42htg" Dec 02 19:44:47 crc kubenswrapper[4792]: I1202 19:44:47.124701 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-kvrfw"/"openshift-service-ca.crt" Dec 02 19:44:47 crc kubenswrapper[4792]: I1202 19:44:47.124902 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-kvrfw"/"kube-root-ca.crt" Dec 02 19:44:47 crc kubenswrapper[4792]: I1202 19:44:47.151772 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-kvrfw/must-gather-42htg"] Dec 02 19:44:47 crc kubenswrapper[4792]: I1202 19:44:47.174460 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lmjhn\" (UniqueName: \"kubernetes.io/projected/1465f5ca-7bc8-4c1a-b901-1311b06825f5-kube-api-access-lmjhn\") pod \"must-gather-42htg\" (UID: \"1465f5ca-7bc8-4c1a-b901-1311b06825f5\") " pod="openshift-must-gather-kvrfw/must-gather-42htg" Dec 02 19:44:47 crc kubenswrapper[4792]: I1202 19:44:47.174810 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/1465f5ca-7bc8-4c1a-b901-1311b06825f5-must-gather-output\") pod \"must-gather-42htg\" (UID: \"1465f5ca-7bc8-4c1a-b901-1311b06825f5\") " pod="openshift-must-gather-kvrfw/must-gather-42htg" Dec 02 19:44:47 crc kubenswrapper[4792]: I1202 19:44:47.276761 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lmjhn\" (UniqueName: \"kubernetes.io/projected/1465f5ca-7bc8-4c1a-b901-1311b06825f5-kube-api-access-lmjhn\") pod \"must-gather-42htg\" (UID: \"1465f5ca-7bc8-4c1a-b901-1311b06825f5\") " pod="openshift-must-gather-kvrfw/must-gather-42htg" Dec 02 19:44:47 crc kubenswrapper[4792]: I1202 19:44:47.276876 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/1465f5ca-7bc8-4c1a-b901-1311b06825f5-must-gather-output\") pod \"must-gather-42htg\" (UID: 
\"1465f5ca-7bc8-4c1a-b901-1311b06825f5\") " pod="openshift-must-gather-kvrfw/must-gather-42htg" Dec 02 19:44:47 crc kubenswrapper[4792]: I1202 19:44:47.277324 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/1465f5ca-7bc8-4c1a-b901-1311b06825f5-must-gather-output\") pod \"must-gather-42htg\" (UID: \"1465f5ca-7bc8-4c1a-b901-1311b06825f5\") " pod="openshift-must-gather-kvrfw/must-gather-42htg" Dec 02 19:44:47 crc kubenswrapper[4792]: I1202 19:44:47.651430 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lmjhn\" (UniqueName: \"kubernetes.io/projected/1465f5ca-7bc8-4c1a-b901-1311b06825f5-kube-api-access-lmjhn\") pod \"must-gather-42htg\" (UID: \"1465f5ca-7bc8-4c1a-b901-1311b06825f5\") " pod="openshift-must-gather-kvrfw/must-gather-42htg" Dec 02 19:44:47 crc kubenswrapper[4792]: I1202 19:44:47.734497 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-kvrfw/must-gather-42htg" Dec 02 19:44:48 crc kubenswrapper[4792]: I1202 19:44:48.209588 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-kvrfw/must-gather-42htg"] Dec 02 19:44:49 crc kubenswrapper[4792]: I1202 19:44:49.052181 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-kvrfw/must-gather-42htg" event={"ID":"1465f5ca-7bc8-4c1a-b901-1311b06825f5","Type":"ContainerStarted","Data":"a2786ad5ea311e9508d52ed8ac75af87649020cab77560afcb687bec2e57b316"} Dec 02 19:44:49 crc kubenswrapper[4792]: I1202 19:44:49.052551 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-kvrfw/must-gather-42htg" event={"ID":"1465f5ca-7bc8-4c1a-b901-1311b06825f5","Type":"ContainerStarted","Data":"9f309923943fe77228437026c9608e15d4c789ef280ac860417e884b641a6ee4"} Dec 02 19:44:49 crc kubenswrapper[4792]: I1202 19:44:49.052567 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-kvrfw/must-gather-42htg" event={"ID":"1465f5ca-7bc8-4c1a-b901-1311b06825f5","Type":"ContainerStarted","Data":"9675126c4ca4c188990a819ab3435ddf1c335d3072701cf84e85c8dd019cb548"} Dec 02 19:44:49 crc kubenswrapper[4792]: I1202 19:44:49.072247 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-kvrfw/must-gather-42htg" podStartSLOduration=2.0721999269999998 podStartE2EDuration="2.072199927s" podCreationTimestamp="2025-12-02 19:44:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 19:44:49.067148485 +0000 UTC m=+4119.840040823" watchObservedRunningTime="2025-12-02 19:44:49.072199927 +0000 UTC m=+4119.845092265" Dec 02 19:44:52 crc kubenswrapper[4792]: I1202 19:44:52.087765 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-kvrfw/crc-debug-k4gcx"] Dec 02 19:44:52 crc kubenswrapper[4792]: I1202 19:44:52.091019 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-kvrfw/crc-debug-k4gcx" Dec 02 19:44:52 crc kubenswrapper[4792]: I1202 19:44:52.093191 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-kvrfw"/"default-dockercfg-6fbfb" Dec 02 19:44:52 crc kubenswrapper[4792]: I1202 19:44:52.187922 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bd7tk\" (UniqueName: \"kubernetes.io/projected/9f7a7fc8-fd1e-461e-8454-ba857cb46383-kube-api-access-bd7tk\") pod \"crc-debug-k4gcx\" (UID: \"9f7a7fc8-fd1e-461e-8454-ba857cb46383\") " pod="openshift-must-gather-kvrfw/crc-debug-k4gcx" Dec 02 19:44:52 crc kubenswrapper[4792]: I1202 19:44:52.188107 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9f7a7fc8-fd1e-461e-8454-ba857cb46383-host\") pod \"crc-debug-k4gcx\" (UID: \"9f7a7fc8-fd1e-461e-8454-ba857cb46383\") " pod="openshift-must-gather-kvrfw/crc-debug-k4gcx" Dec 02 19:44:52 crc kubenswrapper[4792]: I1202 19:44:52.290244 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9f7a7fc8-fd1e-461e-8454-ba857cb46383-host\") pod \"crc-debug-k4gcx\" (UID: \"9f7a7fc8-fd1e-461e-8454-ba857cb46383\") " pod="openshift-must-gather-kvrfw/crc-debug-k4gcx" Dec 02 19:44:52 crc kubenswrapper[4792]: I1202 19:44:52.290378 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bd7tk\" (UniqueName: \"kubernetes.io/projected/9f7a7fc8-fd1e-461e-8454-ba857cb46383-kube-api-access-bd7tk\") pod \"crc-debug-k4gcx\" (UID: \"9f7a7fc8-fd1e-461e-8454-ba857cb46383\") " pod="openshift-must-gather-kvrfw/crc-debug-k4gcx" Dec 02 19:44:52 crc kubenswrapper[4792]: I1202 19:44:52.290385 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9f7a7fc8-fd1e-461e-8454-ba857cb46383-host\") pod \"crc-debug-k4gcx\" (UID: \"9f7a7fc8-fd1e-461e-8454-ba857cb46383\") " pod="openshift-must-gather-kvrfw/crc-debug-k4gcx" Dec 02 19:44:52 crc kubenswrapper[4792]: I1202 19:44:52.312505 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bd7tk\" (UniqueName: \"kubernetes.io/projected/9f7a7fc8-fd1e-461e-8454-ba857cb46383-kube-api-access-bd7tk\") pod \"crc-debug-k4gcx\" (UID: \"9f7a7fc8-fd1e-461e-8454-ba857cb46383\") " pod="openshift-must-gather-kvrfw/crc-debug-k4gcx" Dec 02 19:44:52 crc kubenswrapper[4792]: I1202 19:44:52.410186 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-kvrfw/crc-debug-k4gcx" Dec 02 19:44:53 crc kubenswrapper[4792]: I1202 19:44:53.102088 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-kvrfw/crc-debug-k4gcx" event={"ID":"9f7a7fc8-fd1e-461e-8454-ba857cb46383","Type":"ContainerStarted","Data":"366ab343cae03f6096899271bdd3bf2f2555c5c6cd150f73f1f47119ad3566b0"} Dec 02 19:44:53 crc kubenswrapper[4792]: I1202 19:44:53.102671 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-kvrfw/crc-debug-k4gcx" event={"ID":"9f7a7fc8-fd1e-461e-8454-ba857cb46383","Type":"ContainerStarted","Data":"81c9f7547f9ccbe391a8f95b3b91ac1f5c09ab8cbc3ee892f1093cec06efde7c"} Dec 02 19:44:53 crc kubenswrapper[4792]: I1202 19:44:53.121359 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-kvrfw/crc-debug-k4gcx" podStartSLOduration=1.121344077 podStartE2EDuration="1.121344077s" podCreationTimestamp="2025-12-02 19:44:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 19:44:53.115061812 +0000 UTC m=+4123.887954140" watchObservedRunningTime="2025-12-02 19:44:53.121344077 +0000 UTC m=+4123.894236405" Dec 02 19:45:00 crc kubenswrapper[4792]: I1202 19:45:00.198794 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411745-brmbg"] Dec 02 19:45:00 crc kubenswrapper[4792]: I1202 19:45:00.200916 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411745-brmbg" Dec 02 19:45:00 crc kubenswrapper[4792]: I1202 19:45:00.203043 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 02 19:45:00 crc kubenswrapper[4792]: I1202 19:45:00.203046 4792 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 02 19:45:00 crc kubenswrapper[4792]: I1202 19:45:00.210847 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411745-brmbg"] Dec 02 19:45:00 crc kubenswrapper[4792]: I1202 19:45:00.264390 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75jvh\" (UniqueName: \"kubernetes.io/projected/3a91def0-1ff4-4fb6-8f28-39409bd266b4-kube-api-access-75jvh\") pod \"collect-profiles-29411745-brmbg\" (UID: \"3a91def0-1ff4-4fb6-8f28-39409bd266b4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411745-brmbg" Dec 02 19:45:00 crc kubenswrapper[4792]: I1202 19:45:00.264433 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3a91def0-1ff4-4fb6-8f28-39409bd266b4-config-volume\") pod \"collect-profiles-29411745-brmbg\" (UID: \"3a91def0-1ff4-4fb6-8f28-39409bd266b4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411745-brmbg" Dec 02 19:45:00 crc kubenswrapper[4792]: I1202 19:45:00.264498 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3a91def0-1ff4-4fb6-8f28-39409bd266b4-secret-volume\") pod \"collect-profiles-29411745-brmbg\" (UID: \"3a91def0-1ff4-4fb6-8f28-39409bd266b4\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29411745-brmbg" Dec 02 19:45:00 crc kubenswrapper[4792]: I1202 19:45:00.366677 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3a91def0-1ff4-4fb6-8f28-39409bd266b4-secret-volume\") pod \"collect-profiles-29411745-brmbg\" (UID: \"3a91def0-1ff4-4fb6-8f28-39409bd266b4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411745-brmbg" Dec 02 19:45:00 crc kubenswrapper[4792]: I1202 19:45:00.367198 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75jvh\" (UniqueName: \"kubernetes.io/projected/3a91def0-1ff4-4fb6-8f28-39409bd266b4-kube-api-access-75jvh\") pod \"collect-profiles-29411745-brmbg\" (UID: \"3a91def0-1ff4-4fb6-8f28-39409bd266b4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411745-brmbg" Dec 02 19:45:00 crc kubenswrapper[4792]: I1202 19:45:00.367233 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3a91def0-1ff4-4fb6-8f28-39409bd266b4-config-volume\") pod \"collect-profiles-29411745-brmbg\" (UID: \"3a91def0-1ff4-4fb6-8f28-39409bd266b4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411745-brmbg" Dec 02 19:45:00 crc kubenswrapper[4792]: I1202 19:45:00.368708 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3a91def0-1ff4-4fb6-8f28-39409bd266b4-config-volume\") pod \"collect-profiles-29411745-brmbg\" (UID: \"3a91def0-1ff4-4fb6-8f28-39409bd266b4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411745-brmbg" Dec 02 19:45:00 crc kubenswrapper[4792]: I1202 19:45:00.384071 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3a91def0-1ff4-4fb6-8f28-39409bd266b4-secret-volume\") pod \"collect-profiles-29411745-brmbg\" (UID: \"3a91def0-1ff4-4fb6-8f28-39409bd266b4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411745-brmbg" Dec 02 19:45:00 crc kubenswrapper[4792]: I1202 19:45:00.388844 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75jvh\" (UniqueName: \"kubernetes.io/projected/3a91def0-1ff4-4fb6-8f28-39409bd266b4-kube-api-access-75jvh\") pod \"collect-profiles-29411745-brmbg\" (UID: \"3a91def0-1ff4-4fb6-8f28-39409bd266b4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411745-brmbg" Dec 02 19:45:01 crc kubenswrapper[4792]: I1202 19:45:01.840003 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411745-brmbg" Dec 02 19:45:02 crc kubenswrapper[4792]: I1202 19:45:02.315455 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411745-brmbg"] Dec 02 19:45:03 crc kubenswrapper[4792]: I1202 19:45:03.196097 4792 generic.go:334] "Generic (PLEG): container finished" podID="3a91def0-1ff4-4fb6-8f28-39409bd266b4" containerID="08ca2f7591d99e0e05900046774658ed9773cc5f4402efec0d925cec7fdf8f8e" exitCode=0 Dec 02 19:45:03 crc kubenswrapper[4792]: I1202 19:45:03.196330 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411745-brmbg" event={"ID":"3a91def0-1ff4-4fb6-8f28-39409bd266b4","Type":"ContainerDied","Data":"08ca2f7591d99e0e05900046774658ed9773cc5f4402efec0d925cec7fdf8f8e"} Dec 02 19:45:03 crc kubenswrapper[4792]: I1202 19:45:03.196356 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411745-brmbg" event={"ID":"3a91def0-1ff4-4fb6-8f28-39409bd266b4","Type":"ContainerStarted","Data":"1f0ae75de46ef3f5e69292d0dd4ffd853c0e4953bedda94a3275c4f43ee31339"} Dec 02 19:45:04 crc kubenswrapper[4792]: I1202 19:45:04.718887 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411745-brmbg" Dec 02 19:45:04 crc kubenswrapper[4792]: I1202 19:45:04.762302 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3a91def0-1ff4-4fb6-8f28-39409bd266b4-secret-volume\") pod \"3a91def0-1ff4-4fb6-8f28-39409bd266b4\" (UID: \"3a91def0-1ff4-4fb6-8f28-39409bd266b4\") " Dec 02 19:45:04 crc kubenswrapper[4792]: I1202 19:45:04.762671 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3a91def0-1ff4-4fb6-8f28-39409bd266b4-config-volume\") pod \"3a91def0-1ff4-4fb6-8f28-39409bd266b4\" (UID: \"3a91def0-1ff4-4fb6-8f28-39409bd266b4\") " Dec 02 19:45:04 crc kubenswrapper[4792]: I1202 19:45:04.762802 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-75jvh\" (UniqueName: \"kubernetes.io/projected/3a91def0-1ff4-4fb6-8f28-39409bd266b4-kube-api-access-75jvh\") pod \"3a91def0-1ff4-4fb6-8f28-39409bd266b4\" (UID: \"3a91def0-1ff4-4fb6-8f28-39409bd266b4\") " Dec 02 19:45:04 crc kubenswrapper[4792]: I1202 19:45:04.763918 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3a91def0-1ff4-4fb6-8f28-39409bd266b4-config-volume" (OuterVolumeSpecName: "config-volume") pod "3a91def0-1ff4-4fb6-8f28-39409bd266b4" (UID: "3a91def0-1ff4-4fb6-8f28-39409bd266b4"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 19:45:04 crc kubenswrapper[4792]: I1202 19:45:04.779134 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a91def0-1ff4-4fb6-8f28-39409bd266b4-kube-api-access-75jvh" (OuterVolumeSpecName: "kube-api-access-75jvh") pod "3a91def0-1ff4-4fb6-8f28-39409bd266b4" (UID: "3a91def0-1ff4-4fb6-8f28-39409bd266b4"). InnerVolumeSpecName "kube-api-access-75jvh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:45:04 crc kubenswrapper[4792]: I1202 19:45:04.779582 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a91def0-1ff4-4fb6-8f28-39409bd266b4-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "3a91def0-1ff4-4fb6-8f28-39409bd266b4" (UID: "3a91def0-1ff4-4fb6-8f28-39409bd266b4"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 19:45:04 crc kubenswrapper[4792]: I1202 19:45:04.864847 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-75jvh\" (UniqueName: \"kubernetes.io/projected/3a91def0-1ff4-4fb6-8f28-39409bd266b4-kube-api-access-75jvh\") on node \"crc\" DevicePath \"\"" Dec 02 19:45:04 crc kubenswrapper[4792]: I1202 19:45:04.864906 4792 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3a91def0-1ff4-4fb6-8f28-39409bd266b4-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 02 19:45:04 crc kubenswrapper[4792]: I1202 19:45:04.864916 4792 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3a91def0-1ff4-4fb6-8f28-39409bd266b4-config-volume\") on node \"crc\" DevicePath \"\"" Dec 02 19:45:05 crc kubenswrapper[4792]: I1202 19:45:05.219982 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411745-brmbg" event={"ID":"3a91def0-1ff4-4fb6-8f28-39409bd266b4","Type":"ContainerDied","Data":"1f0ae75de46ef3f5e69292d0dd4ffd853c0e4953bedda94a3275c4f43ee31339"} Dec 02 19:45:05 crc kubenswrapper[4792]: I1202 19:45:05.220028 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1f0ae75de46ef3f5e69292d0dd4ffd853c0e4953bedda94a3275c4f43ee31339" Dec 02 19:45:05 crc kubenswrapper[4792]: I1202 19:45:05.220088 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411745-brmbg" Dec 02 19:45:05 crc kubenswrapper[4792]: I1202 19:45:05.800573 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411700-mhwr5"] Dec 02 19:45:05 crc kubenswrapper[4792]: I1202 19:45:05.811630 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411700-mhwr5"] Dec 02 19:45:07 crc kubenswrapper[4792]: I1202 19:45:07.557723 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fb6b8384-e7ae-45ac-a047-6a9d61275cff" path="/var/lib/kubelet/pods/fb6b8384-e7ae-45ac-a047-6a9d61275cff/volumes" Dec 02 19:45:08 crc kubenswrapper[4792]: I1202 19:45:08.080959 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 19:45:08 crc kubenswrapper[4792]: I1202 19:45:08.081025 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 19:45:28 crc kubenswrapper[4792]: I1202 19:45:28.736630 4792 generic.go:334] "Generic (PLEG): container finished" podID="9f7a7fc8-fd1e-461e-8454-ba857cb46383" containerID="366ab343cae03f6096899271bdd3bf2f2555c5c6cd150f73f1f47119ad3566b0" exitCode=0 Dec 02 19:45:28 crc kubenswrapper[4792]: I1202 19:45:28.736683 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-kvrfw/crc-debug-k4gcx" event={"ID":"9f7a7fc8-fd1e-461e-8454-ba857cb46383","Type":"ContainerDied","Data":"366ab343cae03f6096899271bdd3bf2f2555c5c6cd150f73f1f47119ad3566b0"} Dec 02 19:45:29 crc kubenswrapper[4792]: I1202 19:45:29.861746 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-kvrfw/crc-debug-k4gcx" Dec 02 19:45:29 crc kubenswrapper[4792]: I1202 19:45:29.897640 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-kvrfw/crc-debug-k4gcx"] Dec 02 19:45:29 crc kubenswrapper[4792]: I1202 19:45:29.907004 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-kvrfw/crc-debug-k4gcx"] Dec 02 19:45:29 crc kubenswrapper[4792]: I1202 19:45:29.989211 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9f7a7fc8-fd1e-461e-8454-ba857cb46383-host\") pod \"9f7a7fc8-fd1e-461e-8454-ba857cb46383\" (UID: \"9f7a7fc8-fd1e-461e-8454-ba857cb46383\") " Dec 02 19:45:29 crc kubenswrapper[4792]: I1202 19:45:29.989263 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bd7tk\" (UniqueName: \"kubernetes.io/projected/9f7a7fc8-fd1e-461e-8454-ba857cb46383-kube-api-access-bd7tk\") pod \"9f7a7fc8-fd1e-461e-8454-ba857cb46383\" (UID: \"9f7a7fc8-fd1e-461e-8454-ba857cb46383\") " Dec 02 19:45:29 crc kubenswrapper[4792]: I1202 19:45:29.990745 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9f7a7fc8-fd1e-461e-8454-ba857cb46383-host" (OuterVolumeSpecName: "host") pod "9f7a7fc8-fd1e-461e-8454-ba857cb46383" (UID: "9f7a7fc8-fd1e-461e-8454-ba857cb46383"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 19:45:29 crc kubenswrapper[4792]: I1202 19:45:29.997783 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f7a7fc8-fd1e-461e-8454-ba857cb46383-kube-api-access-bd7tk" (OuterVolumeSpecName: "kube-api-access-bd7tk") pod "9f7a7fc8-fd1e-461e-8454-ba857cb46383" (UID: "9f7a7fc8-fd1e-461e-8454-ba857cb46383"). InnerVolumeSpecName "kube-api-access-bd7tk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:45:30 crc kubenswrapper[4792]: I1202 19:45:30.092077 4792 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9f7a7fc8-fd1e-461e-8454-ba857cb46383-host\") on node \"crc\" DevicePath \"\"" Dec 02 19:45:30 crc kubenswrapper[4792]: I1202 19:45:30.092121 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bd7tk\" (UniqueName: \"kubernetes.io/projected/9f7a7fc8-fd1e-461e-8454-ba857cb46383-kube-api-access-bd7tk\") on node \"crc\" DevicePath \"\"" Dec 02 19:45:30 crc kubenswrapper[4792]: I1202 19:45:30.761051 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="81c9f7547f9ccbe391a8f95b3b91ac1f5c09ab8cbc3ee892f1093cec06efde7c" Dec 02 19:45:30 crc kubenswrapper[4792]: I1202 19:45:30.761660 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-kvrfw/crc-debug-k4gcx" Dec 02 19:45:31 crc kubenswrapper[4792]: I1202 19:45:31.152912 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-kvrfw/crc-debug-4cgxx"] Dec 02 19:45:31 crc kubenswrapper[4792]: E1202 19:45:31.153352 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f7a7fc8-fd1e-461e-8454-ba857cb46383" containerName="container-00" Dec 02 19:45:31 crc kubenswrapper[4792]: I1202 19:45:31.153368 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f7a7fc8-fd1e-461e-8454-ba857cb46383" containerName="container-00" Dec 02 19:45:31 crc kubenswrapper[4792]: E1202 19:45:31.153429 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a91def0-1ff4-4fb6-8f28-39409bd266b4" containerName="collect-profiles" Dec 02 19:45:31 crc kubenswrapper[4792]: I1202 19:45:31.153438 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a91def0-1ff4-4fb6-8f28-39409bd266b4" containerName="collect-profiles" Dec 02 19:45:31 crc kubenswrapper[4792]: I1202 19:45:31.153678 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f7a7fc8-fd1e-461e-8454-ba857cb46383" containerName="container-00" Dec 02 19:45:31 crc kubenswrapper[4792]: I1202 19:45:31.153711 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a91def0-1ff4-4fb6-8f28-39409bd266b4" containerName="collect-profiles" Dec 02 19:45:31 crc kubenswrapper[4792]: I1202 19:45:31.154656 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-kvrfw/crc-debug-4cgxx" Dec 02 19:45:31 crc kubenswrapper[4792]: I1202 19:45:31.156474 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-kvrfw"/"default-dockercfg-6fbfb" Dec 02 19:45:31 crc kubenswrapper[4792]: I1202 19:45:31.255867 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zcv65\" (UniqueName: \"kubernetes.io/projected/543f8b10-380e-4d16-8636-918345b22d47-kube-api-access-zcv65\") pod \"crc-debug-4cgxx\" (UID: \"543f8b10-380e-4d16-8636-918345b22d47\") " pod="openshift-must-gather-kvrfw/crc-debug-4cgxx" Dec 02 19:45:31 crc kubenswrapper[4792]: I1202 19:45:31.256440 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/543f8b10-380e-4d16-8636-918345b22d47-host\") pod \"crc-debug-4cgxx\" (UID: \"543f8b10-380e-4d16-8636-918345b22d47\") " pod="openshift-must-gather-kvrfw/crc-debug-4cgxx" Dec 02 19:45:31 crc kubenswrapper[4792]: I1202 19:45:31.358715 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/543f8b10-380e-4d16-8636-918345b22d47-host\") pod \"crc-debug-4cgxx\" (UID: \"543f8b10-380e-4d16-8636-918345b22d47\") " pod="openshift-must-gather-kvrfw/crc-debug-4cgxx" Dec 02 19:45:31 crc kubenswrapper[4792]: I1202 19:45:31.358897 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zcv65\" (UniqueName: \"kubernetes.io/projected/543f8b10-380e-4d16-8636-918345b22d47-kube-api-access-zcv65\") pod \"crc-debug-4cgxx\" (UID: \"543f8b10-380e-4d16-8636-918345b22d47\") " pod="openshift-must-gather-kvrfw/crc-debug-4cgxx" Dec 02 19:45:31 crc kubenswrapper[4792]: I1202 19:45:31.358898 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: 
\"kubernetes.io/host-path/543f8b10-380e-4d16-8636-918345b22d47-host\") pod \"crc-debug-4cgxx\" (UID: \"543f8b10-380e-4d16-8636-918345b22d47\") " pod="openshift-must-gather-kvrfw/crc-debug-4cgxx" Dec 02 19:45:31 crc kubenswrapper[4792]: I1202 19:45:31.378271 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zcv65\" (UniqueName: \"kubernetes.io/projected/543f8b10-380e-4d16-8636-918345b22d47-kube-api-access-zcv65\") pod \"crc-debug-4cgxx\" (UID: \"543f8b10-380e-4d16-8636-918345b22d47\") " pod="openshift-must-gather-kvrfw/crc-debug-4cgxx" Dec 02 19:45:31 crc kubenswrapper[4792]: I1202 19:45:31.469313 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-kvrfw/crc-debug-4cgxx" Dec 02 19:45:31 crc kubenswrapper[4792]: I1202 19:45:31.550504 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f7a7fc8-fd1e-461e-8454-ba857cb46383" path="/var/lib/kubelet/pods/9f7a7fc8-fd1e-461e-8454-ba857cb46383/volumes" Dec 02 19:45:31 crc kubenswrapper[4792]: I1202 19:45:31.770228 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-kvrfw/crc-debug-4cgxx" event={"ID":"543f8b10-380e-4d16-8636-918345b22d47","Type":"ContainerStarted","Data":"a3d051d39a8754e0f1a7474a4cb4eed43e2554520f94d386267c1f1fb4428b64"} Dec 02 19:45:31 crc kubenswrapper[4792]: I1202 19:45:31.770589 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-kvrfw/crc-debug-4cgxx" event={"ID":"543f8b10-380e-4d16-8636-918345b22d47","Type":"ContainerStarted","Data":"1fd4292cf7ae870a1810021ec669e3a83dfc5d903eb7b0e63f367b763ef9b5e5"} Dec 02 19:45:31 crc kubenswrapper[4792]: I1202 19:45:31.786814 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-kvrfw/crc-debug-4cgxx" podStartSLOduration=0.786799719 podStartE2EDuration="786.799719ms" podCreationTimestamp="2025-12-02 19:45:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 19:45:31.784110418 +0000 UTC m=+4162.557002746" watchObservedRunningTime="2025-12-02 19:45:31.786799719 +0000 UTC m=+4162.559692047" Dec 02 19:45:32 crc kubenswrapper[4792]: I1202 19:45:32.780756 4792 generic.go:334] "Generic (PLEG): container finished" podID="543f8b10-380e-4d16-8636-918345b22d47" containerID="a3d051d39a8754e0f1a7474a4cb4eed43e2554520f94d386267c1f1fb4428b64" exitCode=0 Dec 02 19:45:32 crc kubenswrapper[4792]: I1202 19:45:32.780843 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-kvrfw/crc-debug-4cgxx" event={"ID":"543f8b10-380e-4d16-8636-918345b22d47","Type":"ContainerDied","Data":"a3d051d39a8754e0f1a7474a4cb4eed43e2554520f94d386267c1f1fb4428b64"} Dec 02 19:45:34 crc kubenswrapper[4792]: I1202 19:45:34.274794 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-kvrfw/crc-debug-4cgxx" Dec 02 19:45:34 crc kubenswrapper[4792]: I1202 19:45:34.305993 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-kvrfw/crc-debug-4cgxx"] Dec 02 19:45:34 crc kubenswrapper[4792]: I1202 19:45:34.314415 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-kvrfw/crc-debug-4cgxx"] Dec 02 19:45:34 crc kubenswrapper[4792]: I1202 19:45:34.417999 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/543f8b10-380e-4d16-8636-918345b22d47-host\") pod \"543f8b10-380e-4d16-8636-918345b22d47\" (UID: \"543f8b10-380e-4d16-8636-918345b22d47\") " Dec 02 19:45:34 crc kubenswrapper[4792]: I1202 19:45:34.418113 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/543f8b10-380e-4d16-8636-918345b22d47-host" (OuterVolumeSpecName: "host") pod "543f8b10-380e-4d16-8636-918345b22d47" (UID: "543f8b10-380e-4d16-8636-918345b22d47"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 19:45:34 crc kubenswrapper[4792]: I1202 19:45:34.418230 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zcv65\" (UniqueName: \"kubernetes.io/projected/543f8b10-380e-4d16-8636-918345b22d47-kube-api-access-zcv65\") pod \"543f8b10-380e-4d16-8636-918345b22d47\" (UID: \"543f8b10-380e-4d16-8636-918345b22d47\") " Dec 02 19:45:34 crc kubenswrapper[4792]: I1202 19:45:34.418668 4792 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/543f8b10-380e-4d16-8636-918345b22d47-host\") on node \"crc\" DevicePath \"\"" Dec 02 19:45:34 crc kubenswrapper[4792]: I1202 19:45:34.430810 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/543f8b10-380e-4d16-8636-918345b22d47-kube-api-access-zcv65" (OuterVolumeSpecName: "kube-api-access-zcv65") pod "543f8b10-380e-4d16-8636-918345b22d47" (UID: "543f8b10-380e-4d16-8636-918345b22d47"). InnerVolumeSpecName "kube-api-access-zcv65". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:45:34 crc kubenswrapper[4792]: I1202 19:45:34.520287 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zcv65\" (UniqueName: \"kubernetes.io/projected/543f8b10-380e-4d16-8636-918345b22d47-kube-api-access-zcv65\") on node \"crc\" DevicePath \"\"" Dec 02 19:45:34 crc kubenswrapper[4792]: I1202 19:45:34.800294 4792 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1fd4292cf7ae870a1810021ec669e3a83dfc5d903eb7b0e63f367b763ef9b5e5" Dec 02 19:45:34 crc kubenswrapper[4792]: I1202 19:45:34.800366 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-kvrfw/crc-debug-4cgxx" Dec 02 19:45:35 crc kubenswrapper[4792]: I1202 19:45:35.550480 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="543f8b10-380e-4d16-8636-918345b22d47" path="/var/lib/kubelet/pods/543f8b10-380e-4d16-8636-918345b22d47/volumes" Dec 02 19:45:35 crc kubenswrapper[4792]: I1202 19:45:35.567338 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-kvrfw/crc-debug-cc84b"] Dec 02 19:45:35 crc kubenswrapper[4792]: E1202 19:45:35.567758 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="543f8b10-380e-4d16-8636-918345b22d47" containerName="container-00" Dec 02 19:45:35 crc kubenswrapper[4792]: I1202 19:45:35.567774 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="543f8b10-380e-4d16-8636-918345b22d47" containerName="container-00" Dec 02 19:45:35 crc kubenswrapper[4792]: I1202 19:45:35.568006 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="543f8b10-380e-4d16-8636-918345b22d47" containerName="container-00" Dec 02 19:45:35 crc kubenswrapper[4792]: I1202 19:45:35.568723 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-kvrfw/crc-debug-cc84b" Dec 02 19:45:35 crc kubenswrapper[4792]: I1202 19:45:35.570746 4792 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-kvrfw"/"default-dockercfg-6fbfb" Dec 02 19:45:35 crc kubenswrapper[4792]: I1202 19:45:35.745399 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/510ae30d-32f3-4072-8f4a-585a13117f55-host\") pod \"crc-debug-cc84b\" (UID: \"510ae30d-32f3-4072-8f4a-585a13117f55\") " pod="openshift-must-gather-kvrfw/crc-debug-cc84b" Dec 02 19:45:35 crc kubenswrapper[4792]: I1202 19:45:35.745464 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d6wgj\" (UniqueName: \"kubernetes.io/projected/510ae30d-32f3-4072-8f4a-585a13117f55-kube-api-access-d6wgj\") pod \"crc-debug-cc84b\" (UID: \"510ae30d-32f3-4072-8f4a-585a13117f55\") " pod="openshift-must-gather-kvrfw/crc-debug-cc84b" Dec 02 19:45:35 crc kubenswrapper[4792]: I1202 19:45:35.847411 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/510ae30d-32f3-4072-8f4a-585a13117f55-host\") pod \"crc-debug-cc84b\" (UID: \"510ae30d-32f3-4072-8f4a-585a13117f55\") " pod="openshift-must-gather-kvrfw/crc-debug-cc84b" Dec 02 19:45:35 crc kubenswrapper[4792]: I1202 19:45:35.847473 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d6wgj\" (UniqueName: \"kubernetes.io/projected/510ae30d-32f3-4072-8f4a-585a13117f55-kube-api-access-d6wgj\") pod \"crc-debug-cc84b\" (UID: \"510ae30d-32f3-4072-8f4a-585a13117f55\") " pod="openshift-must-gather-kvrfw/crc-debug-cc84b" Dec 02 19:45:35 crc kubenswrapper[4792]: I1202 19:45:35.847548 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/510ae30d-32f3-4072-8f4a-585a13117f55-host\") pod \"crc-debug-cc84b\" (UID: \"510ae30d-32f3-4072-8f4a-585a13117f55\") " pod="openshift-must-gather-kvrfw/crc-debug-cc84b" Dec 02 19:45:36 crc kubenswrapper[4792]: I1202 19:45:36.357395 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d6wgj\" (UniqueName: 
\"kubernetes.io/projected/510ae30d-32f3-4072-8f4a-585a13117f55-kube-api-access-d6wgj\") pod \"crc-debug-cc84b\" (UID: \"510ae30d-32f3-4072-8f4a-585a13117f55\") " pod="openshift-must-gather-kvrfw/crc-debug-cc84b" Dec 02 19:45:36 crc kubenswrapper[4792]: I1202 19:45:36.483417 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-kvrfw/crc-debug-cc84b" Dec 02 19:45:36 crc kubenswrapper[4792]: W1202 19:45:36.509495 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod510ae30d_32f3_4072_8f4a_585a13117f55.slice/crio-24392b3245f29c53af91172cb98ee1514cf94478d221aa83ab66fbdbec6c4bdc WatchSource:0}: Error finding container 24392b3245f29c53af91172cb98ee1514cf94478d221aa83ab66fbdbec6c4bdc: Status 404 returned error can't find the container with id 24392b3245f29c53af91172cb98ee1514cf94478d221aa83ab66fbdbec6c4bdc Dec 02 19:45:36 crc kubenswrapper[4792]: I1202 19:45:36.820576 4792 generic.go:334] "Generic (PLEG): container finished" podID="510ae30d-32f3-4072-8f4a-585a13117f55" containerID="0ffa222007e3d5b7ca00c19198e81a6d8750bc7867d0d92f44e24ea636b475d3" exitCode=0 Dec 02 19:45:36 crc kubenswrapper[4792]: I1202 19:45:36.820662 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-kvrfw/crc-debug-cc84b" event={"ID":"510ae30d-32f3-4072-8f4a-585a13117f55","Type":"ContainerDied","Data":"0ffa222007e3d5b7ca00c19198e81a6d8750bc7867d0d92f44e24ea636b475d3"} Dec 02 19:45:36 crc kubenswrapper[4792]: I1202 19:45:36.820913 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-kvrfw/crc-debug-cc84b" event={"ID":"510ae30d-32f3-4072-8f4a-585a13117f55","Type":"ContainerStarted","Data":"24392b3245f29c53af91172cb98ee1514cf94478d221aa83ab66fbdbec6c4bdc"} Dec 02 19:45:36 crc kubenswrapper[4792]: I1202 19:45:36.856241 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-kvrfw/crc-debug-cc84b"] Dec 02 19:45:36 crc kubenswrapper[4792]: I1202 19:45:36.870394 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-kvrfw/crc-debug-cc84b"] Dec 02 19:45:37 crc kubenswrapper[4792]: I1202 19:45:37.946549 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-kvrfw/crc-debug-cc84b" Dec 02 19:45:37 crc kubenswrapper[4792]: I1202 19:45:37.992794 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6wgj\" (UniqueName: \"kubernetes.io/projected/510ae30d-32f3-4072-8f4a-585a13117f55-kube-api-access-d6wgj\") pod \"510ae30d-32f3-4072-8f4a-585a13117f55\" (UID: \"510ae30d-32f3-4072-8f4a-585a13117f55\") " Dec 02 19:45:37 crc kubenswrapper[4792]: I1202 19:45:37.992899 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/510ae30d-32f3-4072-8f4a-585a13117f55-host\") pod \"510ae30d-32f3-4072-8f4a-585a13117f55\" (UID: \"510ae30d-32f3-4072-8f4a-585a13117f55\") " Dec 02 19:45:37 crc kubenswrapper[4792]: I1202 19:45:37.993019 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/510ae30d-32f3-4072-8f4a-585a13117f55-host" (OuterVolumeSpecName: "host") pod "510ae30d-32f3-4072-8f4a-585a13117f55" (UID: "510ae30d-32f3-4072-8f4a-585a13117f55"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 19:45:37 crc kubenswrapper[4792]: I1202 19:45:37.993488 4792 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/510ae30d-32f3-4072-8f4a-585a13117f55-host\") on node \"crc\" DevicePath \"\"" Dec 02 19:45:37 crc kubenswrapper[4792]: I1202 19:45:37.998863 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/510ae30d-32f3-4072-8f4a-585a13117f55-kube-api-access-d6wgj" (OuterVolumeSpecName: "kube-api-access-d6wgj") pod "510ae30d-32f3-4072-8f4a-585a13117f55" (UID: "510ae30d-32f3-4072-8f4a-585a13117f55"). InnerVolumeSpecName "kube-api-access-d6wgj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:45:38 crc kubenswrapper[4792]: I1202 19:45:38.081199 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 19:45:38 crc kubenswrapper[4792]: I1202 19:45:38.081263 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 19:45:38 crc kubenswrapper[4792]: I1202 19:45:38.081319 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" Dec 02 19:45:38 crc kubenswrapper[4792]: I1202 19:45:38.082269 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"91980fd36fc859ce6a68107c8e14bd4f51882c72f43243d77f08d00f806eea0a"} pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 19:45:38 crc kubenswrapper[4792]: I1202 19:45:38.082339 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" containerID="cri-o://91980fd36fc859ce6a68107c8e14bd4f51882c72f43243d77f08d00f806eea0a" gracePeriod=600 Dec 02 19:45:38 crc kubenswrapper[4792]: I1202 19:45:38.094174 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6wgj\" (UniqueName: \"kubernetes.io/projected/510ae30d-32f3-4072-8f4a-585a13117f55-kube-api-access-d6wgj\") on node \"crc\" DevicePath \"\"" Dec 02 19:45:38 crc kubenswrapper[4792]: I1202 19:45:38.841415 4792 scope.go:117] "RemoveContainer" containerID="0ffa222007e3d5b7ca00c19198e81a6d8750bc7867d0d92f44e24ea636b475d3" Dec 02 19:45:38 crc kubenswrapper[4792]: I1202 19:45:38.841445 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-kvrfw/crc-debug-cc84b" Dec 02 19:45:38 crc kubenswrapper[4792]: I1202 19:45:38.845838 4792 generic.go:334] "Generic (PLEG): container finished" podID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerID="91980fd36fc859ce6a68107c8e14bd4f51882c72f43243d77f08d00f806eea0a" exitCode=0 Dec 02 19:45:38 crc kubenswrapper[4792]: I1202 19:45:38.845877 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" event={"ID":"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f","Type":"ContainerDied","Data":"91980fd36fc859ce6a68107c8e14bd4f51882c72f43243d77f08d00f806eea0a"} Dec 02 19:45:38 crc kubenswrapper[4792]: I1202 19:45:38.845902 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" event={"ID":"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f","Type":"ContainerStarted","Data":"c44aa25eeed6cd154dd69fd2371d893756cf698611eb58116f99b003da1f4624"} Dec 02 19:45:38 crc kubenswrapper[4792]: I1202 19:45:38.861323 4792 scope.go:117] "RemoveContainer" containerID="0303b4a8c6955524b52ff6e9f01a7697e4973f95ad9a7c905535eb8cc7c64b49" Dec 02 19:45:39 crc kubenswrapper[4792]: I1202 19:45:39.563261 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="510ae30d-32f3-4072-8f4a-585a13117f55" path="/var/lib/kubelet/pods/510ae30d-32f3-4072-8f4a-585a13117f55/volumes" Dec 02 19:45:41 crc kubenswrapper[4792]: I1202 19:45:41.128449 4792 scope.go:117] "RemoveContainer" containerID="6e6ffb95697459136beac3289cc65741a59ce250a4421c887716076e7b7bf5bd" Dec 02 19:46:18 crc kubenswrapper[4792]: I1202 19:46:18.902802 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_26d86aa8-79aa-4d9b-ac24-155924920219/init-config-reloader/0.log" Dec 02 19:46:19 crc kubenswrapper[4792]: I1202 19:46:19.798974 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_26d86aa8-79aa-4d9b-ac24-155924920219/init-config-reloader/0.log" Dec 02 19:46:19 crc kubenswrapper[4792]: I1202 19:46:19.799245 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_26d86aa8-79aa-4d9b-ac24-155924920219/config-reloader/0.log" Dec 02 19:46:19 crc kubenswrapper[4792]: I1202 19:46:19.848585 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_26d86aa8-79aa-4d9b-ac24-155924920219/alertmanager/0.log" Dec 02 19:46:19 crc kubenswrapper[4792]: I1202 19:46:19.969695 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-75d777bcc8-l485p_260c73f4-d8d2-4178-924a-81703068a4f6/barbican-api-log/0.log" Dec 02 19:46:19 crc kubenswrapper[4792]: I1202 19:46:19.978422 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-75d777bcc8-l485p_260c73f4-d8d2-4178-924a-81703068a4f6/barbican-api/0.log" Dec 02 19:46:20 crc kubenswrapper[4792]: I1202 19:46:20.056866 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-bdff8d974-fdcc5_6e40d936-c5d2-4491-b5c5-9794c4fb73b1/barbican-keystone-listener/0.log" Dec 02 19:46:20 crc kubenswrapper[4792]: I1202 19:46:20.222737 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-bdff8d974-fdcc5_6e40d936-c5d2-4491-b5c5-9794c4fb73b1/barbican-keystone-listener-log/0.log" Dec 02 19:46:20 crc kubenswrapper[4792]: I1202 19:46:20.250816 4792 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6d85ccb45-9bkkd_bbaa6700-f41c-49a4-8593-d0d6ba1a6376/barbican-worker-log/0.log" Dec 02 19:46:20 crc kubenswrapper[4792]: I1202 19:46:20.261281 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6d85ccb45-9bkkd_bbaa6700-f41c-49a4-8593-d0d6ba1a6376/barbican-worker/0.log" Dec 02 19:46:20 crc kubenswrapper[4792]: I1202 19:46:20.474238 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2/ceilometer-central-agent/0.log" Dec 02 19:46:20 crc kubenswrapper[4792]: I1202 19:46:20.477341 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-2bppw_e29a810b-8a51-4d2c-ab9e-61315499b272/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 19:46:20 crc kubenswrapper[4792]: I1202 19:46:20.633334 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2/ceilometer-notification-agent/0.log" Dec 02 19:46:20 crc kubenswrapper[4792]: I1202 19:46:20.674814 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2/sg-core/0.log" Dec 02 19:46:20 crc kubenswrapper[4792]: I1202 19:46:20.682752 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_45ee1c9c-d7ad-4b7b-82fe-5fdc7d3d6ab2/proxy-httpd/0.log" Dec 02 19:46:20 crc kubenswrapper[4792]: I1202 19:46:20.849669 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_2b99d2b7-ae25-4088-8cae-a3f6151e735f/cinder-api-log/0.log" Dec 02 19:46:20 crc kubenswrapper[4792]: I1202 19:46:20.898755 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_2b99d2b7-ae25-4088-8cae-a3f6151e735f/cinder-api/0.log" Dec 02 19:46:21 crc kubenswrapper[4792]: I1202 19:46:21.000059 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_4e9df765-ce65-43eb-bdcc-344fb7f68889/cinder-scheduler/0.log" Dec 02 19:46:21 crc kubenswrapper[4792]: I1202 19:46:21.155837 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_4e9df765-ce65-43eb-bdcc-344fb7f68889/probe/0.log" Dec 02 19:46:21 crc kubenswrapper[4792]: I1202 19:46:21.229038 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-api-0_2cbb2471-1100-45f2-9279-c15ef98e34cf/cloudkitty-api-log/0.log" Dec 02 19:46:21 crc kubenswrapper[4792]: I1202 19:46:21.285031 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-api-0_2cbb2471-1100-45f2-9279-c15ef98e34cf/cloudkitty-api/0.log" Dec 02 19:46:21 crc kubenswrapper[4792]: I1202 19:46:21.979683 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-compactor-0_eab5f6f8-38fe-40ac-8407-4fc5044eba84/loki-compactor/0.log" Dec 02 19:46:22 crc kubenswrapper[4792]: I1202 19:46:22.017363 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-distributor-56cd74f89f-lgcfh_c11c6af2-cd99-41e8-b6cf-b86ab025bbfa/loki-distributor/0.log" Dec 02 19:46:22 crc kubenswrapper[4792]: I1202 19:46:22.151184 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-gateway-76cc998948-wsd5z_2f112377-5fcb-424f-9fa1-f92ab0608d82/gateway/0.log" Dec 02 19:46:22 crc kubenswrapper[4792]: I1202 
19:46:22.226169 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-gateway-76cc998948-wz9ls_a7e6a14f-abb0-4d3b-8aa3-b1d4ca0163b4/gateway/0.log" Dec 02 19:46:22 crc kubenswrapper[4792]: I1202 19:46:22.373932 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-index-gateway-0_92495185-21e2-4db2-9b49-6c2b0267c324/loki-index-gateway/0.log" Dec 02 19:46:22 crc kubenswrapper[4792]: I1202 19:46:22.667465 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-query-frontend-779849886d-n28fq_29663f5c-6fe7-42d5-8d53-c8d900e36a9c/loki-query-frontend/0.log" Dec 02 19:46:22 crc kubenswrapper[4792]: I1202 19:46:22.810678 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-ingester-0_63ad1bca-0ff4-4694-ab0a-56e8f5366d88/loki-ingester/0.log" Dec 02 19:46:23 crc kubenswrapper[4792]: I1202 19:46:23.030074 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-sgg5b_e3237d97-ad57-4313-8210-fa48b0740a3c/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 19:46:23 crc kubenswrapper[4792]: I1202 19:46:23.192841 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-z8695_81a6ef07-63e3-4982-896f-c40102622a62/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 19:46:23 crc kubenswrapper[4792]: I1202 19:46:23.268780 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-85f64749dc-lrjg9_c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25/init/0.log" Dec 02 19:46:23 crc kubenswrapper[4792]: I1202 19:46:23.502958 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-85f64749dc-lrjg9_c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25/init/0.log" Dec 02 19:46:23 crc kubenswrapper[4792]: I1202 19:46:23.665360 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-85f64749dc-lrjg9_c05b199f-a2bf-4bed-b0e9-41a7fa9d5e25/dnsmasq-dns/0.log" Dec 02 19:46:23 crc kubenswrapper[4792]: I1202 19:46:23.801121 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-querier-548665d79b-vwc9f_376a394c-12c9-4fa9-b24a-841a6b05ba0b/loki-querier/0.log" Dec 02 19:46:23 crc kubenswrapper[4792]: I1202 19:46:23.808996 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-7c8wk_6263b59c-7edd-49eb-aac3-42fd1c5da951/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 19:46:23 crc kubenswrapper[4792]: I1202 19:46:23.952800 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_5d2d8dfe-ec8c-4c51-850a-1e25165a6826/glance-httpd/0.log" Dec 02 19:46:23 crc kubenswrapper[4792]: I1202 19:46:23.985024 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_5d2d8dfe-ec8c-4c51-850a-1e25165a6826/glance-log/0.log" Dec 02 19:46:24 crc kubenswrapper[4792]: I1202 19:46:24.075393 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_c0b6301c-c44b-4f68-b11f-59e05346f689/glance-httpd/0.log" Dec 02 19:46:24 crc kubenswrapper[4792]: I1202 19:46:24.199073 4792 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_glance-default-internal-api-0_c0b6301c-c44b-4f68-b11f-59e05346f689/glance-log/0.log" Dec 02 19:46:24 crc kubenswrapper[4792]: I1202 19:46:24.330819 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-mfrgx_0d0d81cf-d181-4589-ab97-56eb22868c2f/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 19:46:24 crc kubenswrapper[4792]: I1202 19:46:24.473859 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-s9m9g_927db467-4fe0-45db-bdfa-9f8de3f72259/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 19:46:24 crc kubenswrapper[4792]: I1202 19:46:24.724939 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29411701-8bqdq_7ba4a311-3a7b-4575-b183-12bb721e71a1/keystone-cron/0.log" Dec 02 19:46:24 crc kubenswrapper[4792]: I1202 19:46:24.915761 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-54b9cc4f54-2nnzj_3baa86cf-f5d1-40e8-90cc-227ecfae98cf/keystone-api/0.log" Dec 02 19:46:24 crc kubenswrapper[4792]: I1202 19:46:24.923801 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_e9a3a1a3-54f5-4734-aade-5e64bcad49a4/kube-state-metrics/0.log" Dec 02 19:46:25 crc kubenswrapper[4792]: I1202 19:46:25.135413 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-chd8r_c0a31132-972a-4e92-b005-de8cacadfe2e/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 19:46:25 crc kubenswrapper[4792]: I1202 19:46:25.494500 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5c7cc4d64c-s885f_034594f3-2d13-4657-9426-449348df341f/neutron-httpd/0.log" Dec 02 19:46:25 crc kubenswrapper[4792]: I1202 19:46:25.654284 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5c7cc4d64c-s885f_034594f3-2d13-4657-9426-449348df341f/neutron-api/0.log" Dec 02 19:46:25 crc kubenswrapper[4792]: I1202 19:46:25.738811 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-bdf9p_bf6f87b0-64cb-4aa6-87b5-f4496dd79953/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 19:46:26 crc kubenswrapper[4792]: I1202 19:46:26.262339 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_6ffde9a1-e11f-4216-890b-7992f6e1b84c/nova-api-log/0.log" Dec 02 19:46:26 crc kubenswrapper[4792]: I1202 19:46:26.606671 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_c6ed774e-25d0-47b5-8dd4-1113a9310d29/nova-cell0-conductor-conductor/0.log" Dec 02 19:46:26 crc kubenswrapper[4792]: I1202 19:46:26.620496 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_6ffde9a1-e11f-4216-890b-7992f6e1b84c/nova-api-api/0.log" Dec 02 19:46:26 crc kubenswrapper[4792]: I1202 19:46:26.900863 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_a1884f2e-062d-4a08-aff1-59d9316bfff8/nova-cell1-conductor-conductor/0.log" Dec 02 19:46:27 crc kubenswrapper[4792]: I1202 19:46:27.051123 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_78a5099a-bc49-427d-b2c5-46adcda0e3e9/nova-cell1-novncproxy-novncproxy/0.log" Dec 02 19:46:27 crc kubenswrapper[4792]: I1202 19:46:27.231087 4792 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-bsnvd_5516b9b1-aeb0-40a1-9eac-5c7799b85132/nova-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 19:46:27 crc kubenswrapper[4792]: I1202 19:46:27.353114 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_f0928111-92a0-4459-896b-507add4ebc25/nova-metadata-log/0.log" Dec 02 19:46:27 crc kubenswrapper[4792]: I1202 19:46:27.858241 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_15549a23-1a53-41b1-84f5-a7bfda08faed/nova-scheduler-scheduler/0.log" Dec 02 19:46:27 crc kubenswrapper[4792]: I1202 19:46:27.956353 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_91bf1c70-0d0f-49f9-aae7-59865a7abd26/mysql-bootstrap/0.log" Dec 02 19:46:28 crc kubenswrapper[4792]: I1202 19:46:28.163540 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_91bf1c70-0d0f-49f9-aae7-59865a7abd26/galera/0.log" Dec 02 19:46:28 crc kubenswrapper[4792]: I1202 19:46:28.175846 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_91bf1c70-0d0f-49f9-aae7-59865a7abd26/mysql-bootstrap/0.log" Dec 02 19:46:28 crc kubenswrapper[4792]: I1202 19:46:28.415047 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_0c5e3683-f4d1-4f32-8c6d-ecc11415c660/mysql-bootstrap/0.log" Dec 02 19:46:28 crc kubenswrapper[4792]: I1202 19:46:28.613679 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_0c5e3683-f4d1-4f32-8c6d-ecc11415c660/mysql-bootstrap/0.log" Dec 02 19:46:28 crc kubenswrapper[4792]: I1202 19:46:28.626454 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_0c5e3683-f4d1-4f32-8c6d-ecc11415c660/galera/0.log" Dec 02 19:46:28 crc kubenswrapper[4792]: I1202 19:46:28.809837 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_5703c717-1bce-4ccc-aff7-16c5fe72e724/openstackclient/0.log" Dec 02 19:46:29 crc kubenswrapper[4792]: I1202 19:46:29.044942 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-nvx6l_0a44f4be-0f5d-45dc-9cb0-b4705d150c1a/openstack-network-exporter/0.log" Dec 02 19:46:29 crc kubenswrapper[4792]: I1202 19:46:29.259665 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_f0928111-92a0-4459-896b-507add4ebc25/nova-metadata-metadata/0.log" Dec 02 19:46:30 crc kubenswrapper[4792]: I1202 19:46:30.165215 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-proc-0_b3b138aa-fe70-4c2d-9f02-64a8f5a96ae1/cloudkitty-proc/0.log" Dec 02 19:46:30 crc kubenswrapper[4792]: I1202 19:46:30.200098 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-gpsrm_d0abc322-ef0d-468b-9d23-4e2acd50b51a/ovsdb-server-init/0.log" Dec 02 19:46:30 crc kubenswrapper[4792]: I1202 19:46:30.405634 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-gpsrm_d0abc322-ef0d-468b-9d23-4e2acd50b51a/ovs-vswitchd/0.log" Dec 02 19:46:30 crc kubenswrapper[4792]: I1202 19:46:30.409310 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-gpsrm_d0abc322-ef0d-468b-9d23-4e2acd50b51a/ovsdb-server/0.log" Dec 02 19:46:30 crc kubenswrapper[4792]: I1202 19:46:30.422240 4792 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-gpsrm_d0abc322-ef0d-468b-9d23-4e2acd50b51a/ovsdb-server-init/0.log" Dec 02 19:46:30 crc kubenswrapper[4792]: I1202 19:46:30.647723 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-s44lp_2a5ad51d-6996-42c0-b156-600ff9dc7782/ovn-controller/0.log" Dec 02 19:46:30 crc kubenswrapper[4792]: I1202 19:46:30.652691 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-fdm4x_c5e968fa-782e-49cc-a729-ebf2f94b2bb3/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 19:46:30 crc kubenswrapper[4792]: I1202 19:46:30.838406 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_69c2b30c-76db-4d6d-a091-3a86040f34fd/openstack-network-exporter/0.log" Dec 02 19:46:30 crc kubenswrapper[4792]: I1202 19:46:30.868644 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_69c2b30c-76db-4d6d-a091-3a86040f34fd/ovn-northd/0.log" Dec 02 19:46:31 crc kubenswrapper[4792]: I1202 19:46:31.089201 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_db787f15-5115-48a8-9443-93f5da555d2a/ovsdbserver-nb/0.log" Dec 02 19:46:31 crc kubenswrapper[4792]: I1202 19:46:31.114669 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_db787f15-5115-48a8-9443-93f5da555d2a/openstack-network-exporter/0.log" Dec 02 19:46:31 crc kubenswrapper[4792]: I1202 19:46:31.222787 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_6cfe9a05-cb43-47d3-84f8-95642cd098ec/openstack-network-exporter/0.log" Dec 02 19:46:31 crc kubenswrapper[4792]: I1202 19:46:31.961907 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_6cfe9a05-cb43-47d3-84f8-95642cd098ec/ovsdbserver-sb/0.log" Dec 02 19:46:32 crc kubenswrapper[4792]: I1202 19:46:32.021222 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-55bc995c96-sn8lv_a20a6e66-0ccb-41ae-a2ec-904e1dcada7b/placement-api/0.log" Dec 02 19:46:32 crc kubenswrapper[4792]: I1202 19:46:32.062653 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-55bc995c96-sn8lv_a20a6e66-0ccb-41ae-a2ec-904e1dcada7b/placement-log/0.log" Dec 02 19:46:32 crc kubenswrapper[4792]: I1202 19:46:32.222031 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_5d36d473-c89a-496a-ab27-d65535edb0ec/init-config-reloader/0.log" Dec 02 19:46:32 crc kubenswrapper[4792]: I1202 19:46:32.402289 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_5d36d473-c89a-496a-ab27-d65535edb0ec/config-reloader/0.log" Dec 02 19:46:32 crc kubenswrapper[4792]: I1202 19:46:32.442732 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_5d36d473-c89a-496a-ab27-d65535edb0ec/init-config-reloader/0.log" Dec 02 19:46:32 crc kubenswrapper[4792]: I1202 19:46:32.474645 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_5d36d473-c89a-496a-ab27-d65535edb0ec/thanos-sidecar/0.log" Dec 02 19:46:32 crc kubenswrapper[4792]: I1202 19:46:32.511129 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_5d36d473-c89a-496a-ab27-d65535edb0ec/prometheus/0.log" Dec 02 19:46:32 crc kubenswrapper[4792]: I1202 
19:46:32.675926 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_c43c55d9-74e9-4158-a193-ee8ead807ad7/setup-container/0.log" Dec 02 19:46:32 crc kubenswrapper[4792]: I1202 19:46:32.906165 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_c43c55d9-74e9-4158-a193-ee8ead807ad7/setup-container/0.log" Dec 02 19:46:32 crc kubenswrapper[4792]: I1202 19:46:32.962378 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_197e738b-95d3-4250-b16a-e70331f46ba5/setup-container/0.log" Dec 02 19:46:32 crc kubenswrapper[4792]: I1202 19:46:32.986881 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_c43c55d9-74e9-4158-a193-ee8ead807ad7/rabbitmq/0.log" Dec 02 19:46:33 crc kubenswrapper[4792]: I1202 19:46:33.219086 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_197e738b-95d3-4250-b16a-e70331f46ba5/setup-container/0.log" Dec 02 19:46:33 crc kubenswrapper[4792]: I1202 19:46:33.283584 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_197e738b-95d3-4250-b16a-e70331f46ba5/rabbitmq/0.log" Dec 02 19:46:33 crc kubenswrapper[4792]: I1202 19:46:33.313611 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-pkxk9_5f606874-5cd7-4b0c-b092-c2a2d2e94728/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 19:46:33 crc kubenswrapper[4792]: I1202 19:46:33.573293 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-6cgm9_8133173a-74f1-44d0-ab0d-609e15a2754a/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 19:46:33 crc kubenswrapper[4792]: I1202 19:46:33.600149 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-khrbn_a93efefe-ec0a-45dc-8276-6d99cb2b4db8/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 19:46:33 crc kubenswrapper[4792]: I1202 19:46:33.763640 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-bt7b8_38b7b309-7222-4bf3-b8c1-33b0d01f7c29/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 19:46:33 crc kubenswrapper[4792]: I1202 19:46:33.848828 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-xdsm4_e957a792-604a-439d-8fa3-271edf600cac/ssh-known-hosts-edpm-deployment/0.log" Dec 02 19:46:34 crc kubenswrapper[4792]: I1202 19:46:34.111774 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-79bc665747-kkc2q_e3263958-3718-4ceb-8751-6fa73a1a60f5/proxy-httpd/0.log" Dec 02 19:46:34 crc kubenswrapper[4792]: I1202 19:46:34.113683 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-79bc665747-kkc2q_e3263958-3718-4ceb-8751-6fa73a1a60f5/proxy-server/0.log" Dec 02 19:46:34 crc kubenswrapper[4792]: I1202 19:46:34.202846 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-jd92w_865e7f48-168c-4b42-a6a0-308250071747/swift-ring-rebalance/0.log" Dec 02 19:46:34 crc kubenswrapper[4792]: I1202 19:46:34.354155 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_252fab2e-fcb7-43e8-940a-48adc8f4ebd5/account-reaper/0.log" Dec 02 19:46:34 crc kubenswrapper[4792]: I1202 19:46:34.386814 
4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_252fab2e-fcb7-43e8-940a-48adc8f4ebd5/account-auditor/0.log" Dec 02 19:46:34 crc kubenswrapper[4792]: I1202 19:46:34.437638 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_252fab2e-fcb7-43e8-940a-48adc8f4ebd5/account-replicator/0.log" Dec 02 19:46:34 crc kubenswrapper[4792]: I1202 19:46:34.568696 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_252fab2e-fcb7-43e8-940a-48adc8f4ebd5/container-auditor/0.log" Dec 02 19:46:34 crc kubenswrapper[4792]: I1202 19:46:34.611866 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_252fab2e-fcb7-43e8-940a-48adc8f4ebd5/account-server/0.log" Dec 02 19:46:34 crc kubenswrapper[4792]: I1202 19:46:34.691606 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_252fab2e-fcb7-43e8-940a-48adc8f4ebd5/container-replicator/0.log" Dec 02 19:46:34 crc kubenswrapper[4792]: I1202 19:46:34.697471 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_252fab2e-fcb7-43e8-940a-48adc8f4ebd5/container-server/0.log" Dec 02 19:46:34 crc kubenswrapper[4792]: I1202 19:46:34.774946 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_252fab2e-fcb7-43e8-940a-48adc8f4ebd5/container-updater/0.log" Dec 02 19:46:34 crc kubenswrapper[4792]: I1202 19:46:34.865316 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_252fab2e-fcb7-43e8-940a-48adc8f4ebd5/object-auditor/0.log" Dec 02 19:46:34 crc kubenswrapper[4792]: I1202 19:46:34.941599 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_252fab2e-fcb7-43e8-940a-48adc8f4ebd5/object-expirer/0.log" Dec 02 19:46:34 crc kubenswrapper[4792]: I1202 19:46:34.954686 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_252fab2e-fcb7-43e8-940a-48adc8f4ebd5/object-replicator/0.log" Dec 02 19:46:35 crc kubenswrapper[4792]: I1202 19:46:35.005774 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_252fab2e-fcb7-43e8-940a-48adc8f4ebd5/object-server/0.log" Dec 02 19:46:35 crc kubenswrapper[4792]: I1202 19:46:35.088580 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_252fab2e-fcb7-43e8-940a-48adc8f4ebd5/object-updater/0.log" Dec 02 19:46:35 crc kubenswrapper[4792]: I1202 19:46:35.159687 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_252fab2e-fcb7-43e8-940a-48adc8f4ebd5/rsync/0.log" Dec 02 19:46:35 crc kubenswrapper[4792]: I1202 19:46:35.174900 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_252fab2e-fcb7-43e8-940a-48adc8f4ebd5/swift-recon-cron/0.log" Dec 02 19:46:35 crc kubenswrapper[4792]: I1202 19:46:35.362433 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-w9rl4_6aeeaf00-b476-4d91-a807-92fb47391287/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 19:46:35 crc kubenswrapper[4792]: I1202 19:46:35.425015 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_22c610ff-ae47-48ac-8fea-c3ab17f23106/tempest-tests-tempest-tests-runner/0.log" Dec 02 19:46:35 crc kubenswrapper[4792]: I1202 19:46:35.557216 4792 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_9484b035-7d4b-487e-b070-f935fb55389e/test-operator-logs-container/0.log" Dec 02 19:46:35 crc kubenswrapper[4792]: I1202 19:46:35.671682 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-t4l49_c16326ab-5471-4840-98cc-670d5601a873/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 19:46:42 crc kubenswrapper[4792]: I1202 19:46:42.525231 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_2951974e-17c4-4cf6-b244-6efc7a6fc742/memcached/0.log" Dec 02 19:47:05 crc kubenswrapper[4792]: I1202 19:47:05.486555 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-89kmd_daa6dcd6-39c7-44fc-9754-7de254748ec3/manager/0.log" Dec 02 19:47:05 crc kubenswrapper[4792]: I1202 19:47:05.492899 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-89kmd_daa6dcd6-39c7-44fc-9754-7de254748ec3/kube-rbac-proxy/0.log" Dec 02 19:47:05 crc kubenswrapper[4792]: I1202 19:47:05.636949 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-fmggq_9f1a320f-5255-4fc4-b973-39ce2aee3bae/kube-rbac-proxy/0.log" Dec 02 19:47:05 crc kubenswrapper[4792]: I1202 19:47:05.672512 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-fmggq_9f1a320f-5255-4fc4-b973-39ce2aee3bae/manager/0.log" Dec 02 19:47:05 crc kubenswrapper[4792]: I1202 19:47:05.803278 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-7qgqw_fce25a63-01bd-458a-9567-f08f710abec9/manager/0.log" Dec 02 19:47:05 crc kubenswrapper[4792]: I1202 19:47:05.817858 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-7qgqw_fce25a63-01bd-458a-9567-f08f710abec9/kube-rbac-proxy/0.log" Dec 02 19:47:05 crc kubenswrapper[4792]: I1202 19:47:05.887231 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e9ccccce517e067e9e01e9f87d131b7f866bc945cdc62c92934fa1487f5wsqn_f6b14071-1f38-444e-8b2e-30e7eb904e0f/util/0.log" Dec 02 19:47:06 crc kubenswrapper[4792]: I1202 19:47:06.083734 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e9ccccce517e067e9e01e9f87d131b7f866bc945cdc62c92934fa1487f5wsqn_f6b14071-1f38-444e-8b2e-30e7eb904e0f/pull/0.log" Dec 02 19:47:06 crc kubenswrapper[4792]: I1202 19:47:06.094981 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e9ccccce517e067e9e01e9f87d131b7f866bc945cdc62c92934fa1487f5wsqn_f6b14071-1f38-444e-8b2e-30e7eb904e0f/pull/0.log" Dec 02 19:47:06 crc kubenswrapper[4792]: I1202 19:47:06.122988 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e9ccccce517e067e9e01e9f87d131b7f866bc945cdc62c92934fa1487f5wsqn_f6b14071-1f38-444e-8b2e-30e7eb904e0f/util/0.log" Dec 02 19:47:06 crc kubenswrapper[4792]: I1202 19:47:06.272073 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e9ccccce517e067e9e01e9f87d131b7f866bc945cdc62c92934fa1487f5wsqn_f6b14071-1f38-444e-8b2e-30e7eb904e0f/extract/0.log" Dec 02 19:47:06 crc kubenswrapper[4792]: I1202 19:47:06.282128 
4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e9ccccce517e067e9e01e9f87d131b7f866bc945cdc62c92934fa1487f5wsqn_f6b14071-1f38-444e-8b2e-30e7eb904e0f/pull/0.log" Dec 02 19:47:06 crc kubenswrapper[4792]: I1202 19:47:06.288194 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e9ccccce517e067e9e01e9f87d131b7f866bc945cdc62c92934fa1487f5wsqn_f6b14071-1f38-444e-8b2e-30e7eb904e0f/util/0.log" Dec 02 19:47:06 crc kubenswrapper[4792]: I1202 19:47:06.511250 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-zpmp6_851b5fce-f6b9-4fef-a80c-e66336c5fa49/kube-rbac-proxy/0.log" Dec 02 19:47:06 crc kubenswrapper[4792]: I1202 19:47:06.530963 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-zpmp6_851b5fce-f6b9-4fef-a80c-e66336c5fa49/manager/0.log" Dec 02 19:47:06 crc kubenswrapper[4792]: I1202 19:47:06.555745 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-xbg4j_77a52e44-0fcb-4b97-93de-0d26a6901c37/kube-rbac-proxy/0.log" Dec 02 19:47:06 crc kubenswrapper[4792]: I1202 19:47:06.691556 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-xbg4j_77a52e44-0fcb-4b97-93de-0d26a6901c37/manager/0.log" Dec 02 19:47:06 crc kubenswrapper[4792]: I1202 19:47:06.723817 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-ddpv4_a725d6d0-4642-4316-9e67-e002d58f7117/kube-rbac-proxy/0.log" Dec 02 19:47:06 crc kubenswrapper[4792]: I1202 19:47:06.749014 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-ddpv4_a725d6d0-4642-4316-9e67-e002d58f7117/manager/0.log" Dec 02 19:47:06 crc kubenswrapper[4792]: I1202 19:47:06.917563 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-l4tvg_4998553b-ffbc-4684-9756-22885fec1a98/kube-rbac-proxy/0.log" Dec 02 19:47:07 crc kubenswrapper[4792]: I1202 19:47:07.047387 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-92mth_e3a43b00-c682-4d04-9996-ceb79a245a18/kube-rbac-proxy/0.log" Dec 02 19:47:07 crc kubenswrapper[4792]: I1202 19:47:07.072415 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-l4tvg_4998553b-ffbc-4684-9756-22885fec1a98/manager/0.log" Dec 02 19:47:07 crc kubenswrapper[4792]: I1202 19:47:07.134772 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-92mth_e3a43b00-c682-4d04-9996-ceb79a245a18/manager/0.log" Dec 02 19:47:07 crc kubenswrapper[4792]: I1202 19:47:07.279225 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-xrbqg_9c2e4c8c-6f7d-41b0-bc43-19e0b14297f9/kube-rbac-proxy/0.log" Dec 02 19:47:07 crc kubenswrapper[4792]: I1202 19:47:07.348242 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-xrbqg_9c2e4c8c-6f7d-41b0-bc43-19e0b14297f9/manager/0.log" Dec 02 19:47:07 crc 
kubenswrapper[4792]: I1202 19:47:07.415177 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-g8nwt_15e497ba-5375-4926-80f5-f46940572f8f/kube-rbac-proxy/0.log"
Dec 02 19:47:07 crc kubenswrapper[4792]: I1202 19:47:07.483791 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-g8nwt_15e497ba-5375-4926-80f5-f46940572f8f/manager/0.log"
Dec 02 19:47:07 crc kubenswrapper[4792]: I1202 19:47:07.568320 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-zm7n7_2abae0c8-1cd8-4329-a4dc-678124e1195a/kube-rbac-proxy/0.log"
Dec 02 19:47:07 crc kubenswrapper[4792]: I1202 19:47:07.639992 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-zm7n7_2abae0c8-1cd8-4329-a4dc-678124e1195a/manager/0.log"
Dec 02 19:47:07 crc kubenswrapper[4792]: I1202 19:47:07.684324 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-b7kc7_ef4ea028-2f42-4560-aad7-94553ba2d3d4/kube-rbac-proxy/0.log"
Dec 02 19:47:07 crc kubenswrapper[4792]: I1202 19:47:07.801315 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-b7kc7_ef4ea028-2f42-4560-aad7-94553ba2d3d4/manager/0.log"
Dec 02 19:47:07 crc kubenswrapper[4792]: I1202 19:47:07.840806 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-ksx7c_588c52cc-05c0-438d-bb0f-80bc1236d8cc/kube-rbac-proxy/0.log"
Dec 02 19:47:07 crc kubenswrapper[4792]: I1202 19:47:07.939832 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-ksx7c_588c52cc-05c0-438d-bb0f-80bc1236d8cc/manager/0.log"
Dec 02 19:47:08 crc kubenswrapper[4792]: I1202 19:47:08.031807 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-98tg4_dde05ba1-9b55-4f92-9782-d03fed8f26b0/kube-rbac-proxy/0.log"
Dec 02 19:47:08 crc kubenswrapper[4792]: I1202 19:47:08.112531 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-98tg4_dde05ba1-9b55-4f92-9782-d03fed8f26b0/manager/0.log"
Dec 02 19:47:08 crc kubenswrapper[4792]: I1202 19:47:08.229585 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4jq2kj_8bd01614-55c6-44bf-b67b-8a6570d9425c/kube-rbac-proxy/0.log"
Dec 02 19:47:08 crc kubenswrapper[4792]: I1202 19:47:08.240131 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4jq2kj_8bd01614-55c6-44bf-b67b-8a6570d9425c/manager/0.log"
Dec 02 19:47:08 crc kubenswrapper[4792]: I1202 19:47:08.635506 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-6c49cf65b-lk99g_b729afba-684d-4ecf-a503-fadb0e933192/operator/0.log"
Dec 02 19:47:08 crc kubenswrapper[4792]: I1202 19:47:08.727954 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-gm2bf_15960e0b-8e49-4e4b-b236-5efc49470e11/registry-server/0.log"
Dec 02 19:47:08 crc kubenswrapper[4792]: I1202 19:47:08.887172 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-qrj8t_8becc537-85f1-4b33-8b6a-1ef3bc550cdd/kube-rbac-proxy/0.log"
Dec 02 19:47:09 crc kubenswrapper[4792]: I1202 19:47:09.032125 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-qrj8t_8becc537-85f1-4b33-8b6a-1ef3bc550cdd/manager/0.log"
Dec 02 19:47:09 crc kubenswrapper[4792]: I1202 19:47:09.078100 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-kqqtk_c305906f-16d5-4e43-9666-299106995d65/kube-rbac-proxy/0.log"
Dec 02 19:47:09 crc kubenswrapper[4792]: I1202 19:47:09.160777 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-kqqtk_c305906f-16d5-4e43-9666-299106995d65/manager/0.log"
Dec 02 19:47:09 crc kubenswrapper[4792]: I1202 19:47:09.315938 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-xn9ms_feb0adee-ff46-4603-80f1-a086af7e863c/operator/0.log"
Dec 02 19:47:09 crc kubenswrapper[4792]: I1202 19:47:09.426184 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-cfpd2_64a3a015-bcba-4079-b30b-47579e9a7513/kube-rbac-proxy/0.log"
Dec 02 19:47:09 crc kubenswrapper[4792]: I1202 19:47:09.495947 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-54d77c4c6-68vgq_f2e8a63e-9ce0-4009-b041-46c7f29daa11/manager/0.log"
Dec 02 19:47:09 crc kubenswrapper[4792]: I1202 19:47:09.538467 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-cfpd2_64a3a015-bcba-4079-b30b-47579e9a7513/manager/0.log"
Dec 02 19:47:09 crc kubenswrapper[4792]: I1202 19:47:09.614072 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-6cc9d48475-tplrw_474ebfec-9504-4baa-a320-af5bd167bf33/kube-rbac-proxy/0.log"
Dec 02 19:47:09 crc kubenswrapper[4792]: I1202 19:47:09.747185 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-d922f_746154e5-b7a7-4ce9-b0db-4c88c998ccac/kube-rbac-proxy/0.log"
Dec 02 19:47:09 crc kubenswrapper[4792]: I1202 19:47:09.873589 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-d922f_746154e5-b7a7-4ce9-b0db-4c88c998ccac/manager/0.log"
Dec 02 19:47:09 crc kubenswrapper[4792]: I1202 19:47:09.972210 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-6cc9d48475-tplrw_474ebfec-9504-4baa-a320-af5bd167bf33/manager/0.log"
Dec 02 19:47:09 crc kubenswrapper[4792]: I1202 19:47:09.996806 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-6v9cs_ad6c4009-148b-4b91-bd36-4d9bd2a16bed/kube-rbac-proxy/0.log"
Dec 02 19:47:10 crc kubenswrapper[4792]: I1202 19:47:10.006072 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-6v9cs_ad6c4009-148b-4b91-bd36-4d9bd2a16bed/manager/0.log"
Dec 02 19:47:32 crc kubenswrapper[4792]: I1202 19:47:32.152301 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-phfft_9053d7ed-27f1-470a-8164-6ef32c05ea87/control-plane-machine-set-operator/0.log"
Dec 02 19:47:32 crc kubenswrapper[4792]: I1202 19:47:32.326582 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-nnst7_fb8e1e39-1ba7-4d9a-b6ad-e1ba38d6abf1/machine-api-operator/0.log"
Dec 02 19:47:32 crc kubenswrapper[4792]: I1202 19:47:32.335715 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-nnst7_fb8e1e39-1ba7-4d9a-b6ad-e1ba38d6abf1/kube-rbac-proxy/0.log"
Dec 02 19:47:38 crc kubenswrapper[4792]: I1202 19:47:38.081896 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 02 19:47:38 crc kubenswrapper[4792]: I1202 19:47:38.082929 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 02 19:47:48 crc kubenswrapper[4792]: I1202 19:47:48.210650 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-j9t7f_55936820-08a3-4569-a1c3-a2c8ff5ce620/cert-manager-controller/0.log"
Dec 02 19:47:48 crc kubenswrapper[4792]: I1202 19:47:48.366571 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-qxjx2_99286d04-ee0d-49ca-84f4-4e7dd9fd9e76/cert-manager-cainjector/0.log"
Dec 02 19:47:48 crc kubenswrapper[4792]: I1202 19:47:48.425193 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-2lqc8_a125909d-e0cc-4e4d-ad34-361379b74bf4/cert-manager-webhook/0.log"
Dec 02 19:48:02 crc kubenswrapper[4792]: I1202 19:48:02.784104 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-nmsh6"]
Dec 02 19:48:02 crc kubenswrapper[4792]: E1202 19:48:02.784962 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="510ae30d-32f3-4072-8f4a-585a13117f55" containerName="container-00"
Dec 02 19:48:02 crc kubenswrapper[4792]: I1202 19:48:02.784976 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="510ae30d-32f3-4072-8f4a-585a13117f55" containerName="container-00"
Dec 02 19:48:02 crc kubenswrapper[4792]: I1202 19:48:02.785182 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="510ae30d-32f3-4072-8f4a-585a13117f55" containerName="container-00"
Dec 02 19:48:02 crc kubenswrapper[4792]: I1202 19:48:02.787739 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nmsh6"
Dec 02 19:48:02 crc kubenswrapper[4792]: I1202 19:48:02.796094 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nmsh6"]
Dec 02 19:48:02 crc kubenswrapper[4792]: I1202 19:48:02.816192 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b43b869-0683-4a3d-a063-8746fa22fe85-utilities\") pod \"redhat-operators-nmsh6\" (UID: \"4b43b869-0683-4a3d-a063-8746fa22fe85\") " pod="openshift-marketplace/redhat-operators-nmsh6"
Dec 02 19:48:02 crc kubenswrapper[4792]: I1202 19:48:02.816490 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b43b869-0683-4a3d-a063-8746fa22fe85-catalog-content\") pod \"redhat-operators-nmsh6\" (UID: \"4b43b869-0683-4a3d-a063-8746fa22fe85\") " pod="openshift-marketplace/redhat-operators-nmsh6"
Dec 02 19:48:02 crc kubenswrapper[4792]: I1202 19:48:02.816513 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qvv76\" (UniqueName: \"kubernetes.io/projected/4b43b869-0683-4a3d-a063-8746fa22fe85-kube-api-access-qvv76\") pod \"redhat-operators-nmsh6\" (UID: \"4b43b869-0683-4a3d-a063-8746fa22fe85\") " pod="openshift-marketplace/redhat-operators-nmsh6"
Dec 02 19:48:02 crc kubenswrapper[4792]: I1202 19:48:02.917969 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b43b869-0683-4a3d-a063-8746fa22fe85-utilities\") pod \"redhat-operators-nmsh6\" (UID: \"4b43b869-0683-4a3d-a063-8746fa22fe85\") " pod="openshift-marketplace/redhat-operators-nmsh6"
Dec 02 19:48:02 crc kubenswrapper[4792]: I1202 19:48:02.918054 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b43b869-0683-4a3d-a063-8746fa22fe85-catalog-content\") pod \"redhat-operators-nmsh6\" (UID: \"4b43b869-0683-4a3d-a063-8746fa22fe85\") " pod="openshift-marketplace/redhat-operators-nmsh6"
Dec 02 19:48:02 crc kubenswrapper[4792]: I1202 19:48:02.918082 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qvv76\" (UniqueName: \"kubernetes.io/projected/4b43b869-0683-4a3d-a063-8746fa22fe85-kube-api-access-qvv76\") pod \"redhat-operators-nmsh6\" (UID: \"4b43b869-0683-4a3d-a063-8746fa22fe85\") " pod="openshift-marketplace/redhat-operators-nmsh6"
Dec 02 19:48:02 crc kubenswrapper[4792]: I1202 19:48:02.918487 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b43b869-0683-4a3d-a063-8746fa22fe85-utilities\") pod \"redhat-operators-nmsh6\" (UID: \"4b43b869-0683-4a3d-a063-8746fa22fe85\") " pod="openshift-marketplace/redhat-operators-nmsh6"
Dec 02 19:48:02 crc kubenswrapper[4792]: I1202 19:48:02.918508 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b43b869-0683-4a3d-a063-8746fa22fe85-catalog-content\") pod \"redhat-operators-nmsh6\" (UID: \"4b43b869-0683-4a3d-a063-8746fa22fe85\") " pod="openshift-marketplace/redhat-operators-nmsh6"
Dec 02 19:48:02 crc kubenswrapper[4792]: I1202 19:48:02.936415 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qvv76\" (UniqueName: \"kubernetes.io/projected/4b43b869-0683-4a3d-a063-8746fa22fe85-kube-api-access-qvv76\") pod \"redhat-operators-nmsh6\" (UID: \"4b43b869-0683-4a3d-a063-8746fa22fe85\") " pod="openshift-marketplace/redhat-operators-nmsh6"
Dec 02 19:48:03 crc kubenswrapper[4792]: I1202 19:48:03.104239 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nmsh6"
Dec 02 19:48:03 crc kubenswrapper[4792]: I1202 19:48:03.183256 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-gs72k_1609afe3-03e9-4bab-8ea5-444ffe47a8a0/nmstate-console-plugin/0.log"
Dec 02 19:48:03 crc kubenswrapper[4792]: I1202 19:48:03.343947 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-cvm76_59543a69-801f-485f-b683-b9328aab396e/nmstate-handler/0.log"
Dec 02 19:48:03 crc kubenswrapper[4792]: I1202 19:48:03.623415 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nmsh6"]
Dec 02 19:48:04 crc kubenswrapper[4792]: I1202 19:48:04.394537 4792 generic.go:334] "Generic (PLEG): container finished" podID="4b43b869-0683-4a3d-a063-8746fa22fe85" containerID="f76a428b8883fc5c0b50a7fccd7b73a69486499bbda2f9a485dc4acfee5c3be1" exitCode=0
Dec 02 19:48:04 crc kubenswrapper[4792]: I1202 19:48:04.394754 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nmsh6" event={"ID":"4b43b869-0683-4a3d-a063-8746fa22fe85","Type":"ContainerDied","Data":"f76a428b8883fc5c0b50a7fccd7b73a69486499bbda2f9a485dc4acfee5c3be1"}
Dec 02 19:48:04 crc kubenswrapper[4792]: I1202 19:48:04.394934 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nmsh6" event={"ID":"4b43b869-0683-4a3d-a063-8746fa22fe85","Type":"ContainerStarted","Data":"fdd65c57b21520f42124be841ab6980dcae773a04eb8fbaabb0c97833be99be6"}
Dec 02 19:48:04 crc kubenswrapper[4792]: I1202 19:48:04.396182 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-npmdr_9874a853-6f18-456b-9634-c3b923e8113c/nmstate-operator/0.log"
Dec 02 19:48:04 crc kubenswrapper[4792]: I1202 19:48:04.397307 4792 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 02 19:48:04 crc kubenswrapper[4792]: I1202 19:48:04.398201 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-svbm7_34e1a790-24ea-4564-8453-f525053ec5fa/kube-rbac-proxy/0.log"
Dec 02 19:48:04 crc kubenswrapper[4792]: I1202 19:48:04.470564 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-svbm7_34e1a790-24ea-4564-8453-f525053ec5fa/nmstate-metrics/0.log"
Dec 02 19:48:04 crc kubenswrapper[4792]: I1202 19:48:04.626508 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-w46s7_0ef33e67-bcbf-4c68-87d4-5cc1db2e73d0/nmstate-webhook/0.log"
Dec 02 19:48:06 crc kubenswrapper[4792]: I1202 19:48:06.414823 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nmsh6" event={"ID":"4b43b869-0683-4a3d-a063-8746fa22fe85","Type":"ContainerStarted","Data":"ace0ad2b6ade087588f5d54704f3ab9a837586669503762c45e4f3e845b8e218"}
Dec 02 19:48:08 crc kubenswrapper[4792]: I1202 19:48:08.081310 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 02 19:48:08 crc kubenswrapper[4792]: I1202 19:48:08.081871 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 02 19:48:09 crc kubenswrapper[4792]: I1202 19:48:09.466152 4792 generic.go:334] "Generic (PLEG): container finished" podID="4b43b869-0683-4a3d-a063-8746fa22fe85" containerID="ace0ad2b6ade087588f5d54704f3ab9a837586669503762c45e4f3e845b8e218" exitCode=0
Dec 02 19:48:09 crc kubenswrapper[4792]: I1202 19:48:09.466195 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nmsh6" event={"ID":"4b43b869-0683-4a3d-a063-8746fa22fe85","Type":"ContainerDied","Data":"ace0ad2b6ade087588f5d54704f3ab9a837586669503762c45e4f3e845b8e218"}
Dec 02 19:48:10 crc kubenswrapper[4792]: I1202 19:48:10.477133 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nmsh6" event={"ID":"4b43b869-0683-4a3d-a063-8746fa22fe85","Type":"ContainerStarted","Data":"5134c1b08ed7d50ba78a8a8ecc4411b7bb1ad9381268813a1f48a7b86ca3bbfb"}
Dec 02 19:48:10 crc kubenswrapper[4792]: I1202 19:48:10.501357 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-nmsh6" podStartSLOduration=2.755688048 podStartE2EDuration="8.501342658s" podCreationTimestamp="2025-12-02 19:48:02 +0000 UTC" firstStartedPulling="2025-12-02 19:48:04.397097973 +0000 UTC m=+4315.169990301" lastFinishedPulling="2025-12-02 19:48:10.142752573 +0000 UTC m=+4320.915644911" observedRunningTime="2025-12-02 19:48:10.496054599 +0000 UTC m=+4321.268946927" watchObservedRunningTime="2025-12-02 19:48:10.501342658 +0000 UTC m=+4321.274234986"
Dec 02 19:48:13 crc kubenswrapper[4792]: I1202 19:48:13.104777 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-nmsh6"
Dec 02 19:48:13 crc kubenswrapper[4792]: I1202 19:48:13.105315 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-nmsh6"
Dec 02 19:48:14 crc kubenswrapper[4792]: I1202 19:48:14.163966 4792 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-nmsh6" podUID="4b43b869-0683-4a3d-a063-8746fa22fe85" containerName="registry-server" probeResult="failure" output=<
Dec 02 19:48:14 crc kubenswrapper[4792]: timeout: failed to connect service ":50051" within 1s
Dec 02 19:48:14 crc kubenswrapper[4792]: >
Dec 02 19:48:20 crc kubenswrapper[4792]: I1202 19:48:20.420130 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-7fd7bc68c8-q7j7s_0ea8c233-1ba5-435d-a7e8-93d9d055fe7b/kube-rbac-proxy/0.log"
Dec 02 19:48:20 crc kubenswrapper[4792]: I1202 19:48:20.467375 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-7fd7bc68c8-q7j7s_0ea8c233-1ba5-435d-a7e8-93d9d055fe7b/manager/0.log"
Dec 02 19:48:23 crc kubenswrapper[4792]: I1202 19:48:23.159879 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-nmsh6"
Dec 02 19:48:23 crc kubenswrapper[4792]: I1202 19:48:23.223015 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-nmsh6"
Dec 02 19:48:23 crc kubenswrapper[4792]: I1202 19:48:23.398581 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nmsh6"]
Dec 02 19:48:24 crc kubenswrapper[4792]: I1202 19:48:24.670251 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-nmsh6" podUID="4b43b869-0683-4a3d-a063-8746fa22fe85" containerName="registry-server" containerID="cri-o://5134c1b08ed7d50ba78a8a8ecc4411b7bb1ad9381268813a1f48a7b86ca3bbfb" gracePeriod=2
Dec 02 19:48:25 crc kubenswrapper[4792]: I1202 19:48:25.687772 4792 generic.go:334] "Generic (PLEG): container finished" podID="4b43b869-0683-4a3d-a063-8746fa22fe85" containerID="5134c1b08ed7d50ba78a8a8ecc4411b7bb1ad9381268813a1f48a7b86ca3bbfb" exitCode=0
Dec 02 19:48:25 crc kubenswrapper[4792]: I1202 19:48:25.687970 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nmsh6" event={"ID":"4b43b869-0683-4a3d-a063-8746fa22fe85","Type":"ContainerDied","Data":"5134c1b08ed7d50ba78a8a8ecc4411b7bb1ad9381268813a1f48a7b86ca3bbfb"}
Dec 02 19:48:25 crc kubenswrapper[4792]: I1202 19:48:25.828055 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nmsh6"
Dec 02 19:48:25 crc kubenswrapper[4792]: I1202 19:48:25.934503 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qvv76\" (UniqueName: \"kubernetes.io/projected/4b43b869-0683-4a3d-a063-8746fa22fe85-kube-api-access-qvv76\") pod \"4b43b869-0683-4a3d-a063-8746fa22fe85\" (UID: \"4b43b869-0683-4a3d-a063-8746fa22fe85\") "
Dec 02 19:48:25 crc kubenswrapper[4792]: I1202 19:48:25.934572 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b43b869-0683-4a3d-a063-8746fa22fe85-catalog-content\") pod \"4b43b869-0683-4a3d-a063-8746fa22fe85\" (UID: \"4b43b869-0683-4a3d-a063-8746fa22fe85\") "
Dec 02 19:48:25 crc kubenswrapper[4792]: I1202 19:48:25.934603 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b43b869-0683-4a3d-a063-8746fa22fe85-utilities\") pod \"4b43b869-0683-4a3d-a063-8746fa22fe85\" (UID: \"4b43b869-0683-4a3d-a063-8746fa22fe85\") "
Dec 02 19:48:25 crc kubenswrapper[4792]: I1202 19:48:25.935467 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4b43b869-0683-4a3d-a063-8746fa22fe85-utilities" (OuterVolumeSpecName: "utilities") pod "4b43b869-0683-4a3d-a063-8746fa22fe85" (UID: "4b43b869-0683-4a3d-a063-8746fa22fe85"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 19:48:25 crc kubenswrapper[4792]: I1202 19:48:25.950171 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b43b869-0683-4a3d-a063-8746fa22fe85-kube-api-access-qvv76" (OuterVolumeSpecName: "kube-api-access-qvv76") pod "4b43b869-0683-4a3d-a063-8746fa22fe85" (UID: "4b43b869-0683-4a3d-a063-8746fa22fe85"). InnerVolumeSpecName "kube-api-access-qvv76". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 19:48:26 crc kubenswrapper[4792]: I1202 19:48:26.037694 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qvv76\" (UniqueName: \"kubernetes.io/projected/4b43b869-0683-4a3d-a063-8746fa22fe85-kube-api-access-qvv76\") on node \"crc\" DevicePath \"\""
Dec 02 19:48:26 crc kubenswrapper[4792]: I1202 19:48:26.037727 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b43b869-0683-4a3d-a063-8746fa22fe85-utilities\") on node \"crc\" DevicePath \"\""
Dec 02 19:48:26 crc kubenswrapper[4792]: I1202 19:48:26.054959 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4b43b869-0683-4a3d-a063-8746fa22fe85-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4b43b869-0683-4a3d-a063-8746fa22fe85" (UID: "4b43b869-0683-4a3d-a063-8746fa22fe85"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 19:48:26 crc kubenswrapper[4792]: I1202 19:48:26.140060 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b43b869-0683-4a3d-a063-8746fa22fe85-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 02 19:48:26 crc kubenswrapper[4792]: I1202 19:48:26.707235 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nmsh6" event={"ID":"4b43b869-0683-4a3d-a063-8746fa22fe85","Type":"ContainerDied","Data":"fdd65c57b21520f42124be841ab6980dcae773a04eb8fbaabb0c97833be99be6"}
Dec 02 19:48:26 crc kubenswrapper[4792]: I1202 19:48:26.707735 4792 scope.go:117] "RemoveContainer" containerID="5134c1b08ed7d50ba78a8a8ecc4411b7bb1ad9381268813a1f48a7b86ca3bbfb"
Dec 02 19:48:26 crc kubenswrapper[4792]: I1202 19:48:26.708042 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nmsh6"
Dec 02 19:48:26 crc kubenswrapper[4792]: I1202 19:48:26.745272 4792 scope.go:117] "RemoveContainer" containerID="ace0ad2b6ade087588f5d54704f3ab9a837586669503762c45e4f3e845b8e218"
Dec 02 19:48:26 crc kubenswrapper[4792]: I1202 19:48:26.758162 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nmsh6"]
Dec 02 19:48:26 crc kubenswrapper[4792]: I1202 19:48:26.767543 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-nmsh6"]
Dec 02 19:48:26 crc kubenswrapper[4792]: I1202 19:48:26.784024 4792 scope.go:117] "RemoveContainer" containerID="f76a428b8883fc5c0b50a7fccd7b73a69486499bbda2f9a485dc4acfee5c3be1"
Dec 02 19:48:27 crc kubenswrapper[4792]: I1202 19:48:27.554849 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b43b869-0683-4a3d-a063-8746fa22fe85" path="/var/lib/kubelet/pods/4b43b869-0683-4a3d-a063-8746fa22fe85/volumes"
Dec 02 19:48:37 crc kubenswrapper[4792]: I1202 19:48:37.782181 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-pjmcf_cbe95ba3-c2a1-4755-8571-ddaba0aca9d6/kube-rbac-proxy/0.log"
Dec 02 19:48:37 crc kubenswrapper[4792]: I1202 19:48:37.906704 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-pjmcf_cbe95ba3-c2a1-4755-8571-ddaba0aca9d6/controller/0.log"
Dec 02 19:48:38 crc kubenswrapper[4792]: I1202 19:48:38.050086 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-st62r_57c06fe7-dccf-4f91-a9b1-21d345dd688e/cp-frr-files/0.log"
Dec 02 19:48:38 crc kubenswrapper[4792]: I1202 19:48:38.080966 4792 patch_prober.go:28] interesting pod/machine-config-daemon-wpdh4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 02 19:48:38 crc kubenswrapper[4792]: I1202 19:48:38.081048 4792 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 02 19:48:38 crc kubenswrapper[4792]: I1202 19:48:38.081111 4792 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4"
Dec 02 19:48:38 crc kubenswrapper[4792]: I1202 19:48:38.082013 4792 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c44aa25eeed6cd154dd69fd2371d893756cf698611eb58116f99b003da1f4624"} pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 02 19:48:38 crc kubenswrapper[4792]: I1202 19:48:38.082087 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerName="machine-config-daemon" containerID="cri-o://c44aa25eeed6cd154dd69fd2371d893756cf698611eb58116f99b003da1f4624" gracePeriod=600
Dec 02 19:48:38 crc kubenswrapper[4792]: I1202 19:48:38.232142 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-st62r_57c06fe7-dccf-4f91-a9b1-21d345dd688e/cp-frr-files/0.log"
Dec 02 19:48:38 crc kubenswrapper[4792]: I1202 19:48:38.283350 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-st62r_57c06fe7-dccf-4f91-a9b1-21d345dd688e/cp-metrics/0.log"
Dec 02 19:48:38 crc kubenswrapper[4792]: I1202 19:48:38.288119 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-st62r_57c06fe7-dccf-4f91-a9b1-21d345dd688e/cp-reloader/0.log"
Dec 02 19:48:38 crc kubenswrapper[4792]: I1202 19:48:38.304650 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-st62r_57c06fe7-dccf-4f91-a9b1-21d345dd688e/cp-reloader/0.log"
Dec 02 19:48:38 crc kubenswrapper[4792]: E1202 19:48:38.765045 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f"
Dec 02 19:48:38 crc kubenswrapper[4792]: I1202 19:48:38.841243 4792 generic.go:334] "Generic (PLEG): container finished" podID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" containerID="c44aa25eeed6cd154dd69fd2371d893756cf698611eb58116f99b003da1f4624" exitCode=0
Dec 02 19:48:38 crc kubenswrapper[4792]: I1202 19:48:38.841329 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" event={"ID":"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f","Type":"ContainerDied","Data":"c44aa25eeed6cd154dd69fd2371d893756cf698611eb58116f99b003da1f4624"}
Dec 02 19:48:38 crc kubenswrapper[4792]: I1202 19:48:38.841541 4792 scope.go:117] "RemoveContainer" containerID="91980fd36fc859ce6a68107c8e14bd4f51882c72f43243d77f08d00f806eea0a"
Dec 02 19:48:38 crc kubenswrapper[4792]: I1202 19:48:38.842653 4792 scope.go:117] "RemoveContainer" containerID="c44aa25eeed6cd154dd69fd2371d893756cf698611eb58116f99b003da1f4624"
Dec 02 19:48:38 crc kubenswrapper[4792]: E1202 19:48:38.843012 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f"
Dec 02 19:48:39 crc kubenswrapper[4792]: I1202 19:48:39.032923 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-st62r_57c06fe7-dccf-4f91-a9b1-21d345dd688e/cp-reloader/0.log"
Dec 02 19:48:39 crc kubenswrapper[4792]: I1202 19:48:39.087033 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-st62r_57c06fe7-dccf-4f91-a9b1-21d345dd688e/cp-metrics/0.log"
Dec 02 19:48:39 crc kubenswrapper[4792]: I1202 19:48:39.087178 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-st62r_57c06fe7-dccf-4f91-a9b1-21d345dd688e/cp-metrics/0.log"
Dec 02 19:48:39 crc kubenswrapper[4792]: I1202 19:48:39.088706 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-st62r_57c06fe7-dccf-4f91-a9b1-21d345dd688e/cp-frr-files/0.log"
Dec 02 19:48:39 crc kubenswrapper[4792]: I1202 19:48:39.309170 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-st62r_57c06fe7-dccf-4f91-a9b1-21d345dd688e/cp-frr-files/0.log"
Dec 02 19:48:39 crc kubenswrapper[4792]: I1202 19:48:39.336803 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-st62r_57c06fe7-dccf-4f91-a9b1-21d345dd688e/cp-reloader/0.log"
Dec 02 19:48:39 crc kubenswrapper[4792]: I1202 19:48:39.343401 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-st62r_57c06fe7-dccf-4f91-a9b1-21d345dd688e/controller/0.log"
Dec 02 19:48:39 crc kubenswrapper[4792]: I1202 19:48:39.355361 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-st62r_57c06fe7-dccf-4f91-a9b1-21d345dd688e/cp-metrics/0.log"
Dec 02 19:48:39 crc kubenswrapper[4792]: I1202 19:48:39.513490 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-st62r_57c06fe7-dccf-4f91-a9b1-21d345dd688e/frr-metrics/0.log"
Dec 02 19:48:39 crc kubenswrapper[4792]: I1202 19:48:39.522534 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-st62r_57c06fe7-dccf-4f91-a9b1-21d345dd688e/kube-rbac-proxy/0.log"
Dec 02 19:48:39 crc kubenswrapper[4792]: I1202 19:48:39.562240 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-st62r_57c06fe7-dccf-4f91-a9b1-21d345dd688e/kube-rbac-proxy-frr/0.log"
Dec 02 19:48:39 crc kubenswrapper[4792]: I1202 19:48:39.708798 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-st62r_57c06fe7-dccf-4f91-a9b1-21d345dd688e/reloader/0.log"
Dec 02 19:48:39 crc kubenswrapper[4792]: I1202 19:48:39.825284 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-cjmwb_4be13a2e-5592-4f44-ad74-31cf277205bf/frr-k8s-webhook-server/0.log"
Dec 02 19:48:39 crc kubenswrapper[4792]: I1202 19:48:39.987724 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-7d46f7f9d9-8xxzl_6ba2dc70-5519-41e5-b9fe-57508fd8e395/manager/0.log"
Dec 02 19:48:40 crc kubenswrapper[4792]: I1202 19:48:40.411997 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-6974b4d988-96g88_11d281b1-8805-4a7a-b234-53a4e52ae307/webhook-server/0.log"
Dec 02 19:48:40 crc kubenswrapper[4792]: I1202 19:48:40.550568 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-fmdjd_38a2f9f3-6ce0-4790-8d68-59a8fc723caa/kube-rbac-proxy/0.log"
Dec 02 19:48:40 crc kubenswrapper[4792]: I1202 19:48:40.704704 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-st62r_57c06fe7-dccf-4f91-a9b1-21d345dd688e/frr/0.log"
Dec 02 19:48:40 crc kubenswrapper[4792]: I1202 19:48:40.952582 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-fmdjd_38a2f9f3-6ce0-4790-8d68-59a8fc723caa/speaker/0.log"
Dec 02 19:48:52 crc kubenswrapper[4792]: I1202 19:48:52.540315 4792 scope.go:117] "RemoveContainer" containerID="c44aa25eeed6cd154dd69fd2371d893756cf698611eb58116f99b003da1f4624"
Dec 02 19:48:52 crc kubenswrapper[4792]: E1202 19:48:52.541103 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f"
Dec 02 19:48:57 crc kubenswrapper[4792]: I1202 19:48:57.189281 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wvqmw_19402c55-9b6f-4486-a3ce-e6971e5da081/util/0.log"
Dec 02 19:48:57 crc kubenswrapper[4792]: I1202 19:48:57.365437 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wvqmw_19402c55-9b6f-4486-a3ce-e6971e5da081/pull/0.log"
Dec 02 19:48:57 crc kubenswrapper[4792]: I1202 19:48:57.379212 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wvqmw_19402c55-9b6f-4486-a3ce-e6971e5da081/util/0.log"
Dec 02 19:48:57 crc kubenswrapper[4792]: I1202 19:48:57.395574 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wvqmw_19402c55-9b6f-4486-a3ce-e6971e5da081/pull/0.log"
Dec 02 19:48:57 crc kubenswrapper[4792]: I1202 19:48:57.913163 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wvqmw_19402c55-9b6f-4486-a3ce-e6971e5da081/util/0.log"
Dec 02 19:48:57 crc kubenswrapper[4792]: I1202 19:48:57.977442 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wvqmw_19402c55-9b6f-4486-a3ce-e6971e5da081/pull/0.log"
Dec 02 19:48:58 crc kubenswrapper[4792]: I1202 19:48:58.005430 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wvqmw_19402c55-9b6f-4486-a3ce-e6971e5da081/extract/0.log"
Dec 02 19:48:58 crc kubenswrapper[4792]: I1202 19:48:58.092974 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303hbhzx_22b8caed-5376-403f-bb65-e3a12cf9c7af/util/0.log"
Dec 02 19:48:58 crc kubenswrapper[4792]: I1202 19:48:58.280144 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303hbhzx_22b8caed-5376-403f-bb65-e3a12cf9c7af/pull/0.log"
Dec 02 19:48:58 crc kubenswrapper[4792]: I1202 19:48:58.336002 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303hbhzx_22b8caed-5376-403f-bb65-e3a12cf9c7af/util/0.log"
Dec 02 19:48:58 crc kubenswrapper[4792]: I1202 19:48:58.336881 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303hbhzx_22b8caed-5376-403f-bb65-e3a12cf9c7af/pull/0.log"
Dec 02 19:48:58 crc kubenswrapper[4792]: I1202 19:48:58.515710 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303hbhzx_22b8caed-5376-403f-bb65-e3a12cf9c7af/pull/0.log"
Dec 02 19:48:58 crc kubenswrapper[4792]: I1202 19:48:58.526797 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303hbhzx_22b8caed-5376-403f-bb65-e3a12cf9c7af/extract/0.log"
Dec 02 19:48:58 crc kubenswrapper[4792]: I1202 19:48:58.543938 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303hbhzx_22b8caed-5376-403f-bb65-e3a12cf9c7af/util/0.log"
Dec 02 19:48:58 crc kubenswrapper[4792]: I1202 19:48:58.671313 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f5p9hp_63ea8a4a-8b04-44f4-a0a8-4767d02973bb/util/0.log"
Dec 02 19:48:59 crc kubenswrapper[4792]: I1202 19:48:59.019682 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f5p9hp_63ea8a4a-8b04-44f4-a0a8-4767d02973bb/pull/0.log"
Dec 02 19:48:59 crc kubenswrapper[4792]: I1202 19:48:59.028563 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f5p9hp_63ea8a4a-8b04-44f4-a0a8-4767d02973bb/util/0.log"
Dec 02 19:48:59 crc kubenswrapper[4792]: I1202 19:48:59.046800 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f5p9hp_63ea8a4a-8b04-44f4-a0a8-4767d02973bb/pull/0.log"
Dec 02 19:49:00 crc kubenswrapper[4792]: I1202 19:49:00.051106 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f5p9hp_63ea8a4a-8b04-44f4-a0a8-4767d02973bb/extract/0.log"
Dec 02 19:49:00 crc kubenswrapper[4792]: I1202 19:49:00.070388 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f5p9hp_63ea8a4a-8b04-44f4-a0a8-4767d02973bb/pull/0.log"
Dec 02 19:49:00 crc kubenswrapper[4792]: I1202 19:49:00.072945 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f5p9hp_63ea8a4a-8b04-44f4-a0a8-4767d02973bb/util/0.log"
Dec 02 19:49:00 crc kubenswrapper[4792]: I1202 19:49:00.247676 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92108kgnb_d557837e-335a-4da0-b7aa-e8d0a516eada/util/0.log"
Dec 02 19:49:00 crc kubenswrapper[4792]: I1202 19:49:00.553499 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92108kgnb_d557837e-335a-4da0-b7aa-e8d0a516eada/util/0.log"
Dec 02 19:49:00 crc kubenswrapper[4792]: I1202 19:49:00.609467 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92108kgnb_d557837e-335a-4da0-b7aa-e8d0a516eada/pull/0.log"
Dec 02 19:49:00 crc kubenswrapper[4792]: I1202 19:49:00.671953 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92108kgnb_d557837e-335a-4da0-b7aa-e8d0a516eada/pull/0.log"
Dec 02 19:49:00 crc kubenswrapper[4792]: I1202 19:49:00.715494 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92108kgnb_d557837e-335a-4da0-b7aa-e8d0a516eada/util/0.log"
Dec 02 19:49:00 crc kubenswrapper[4792]: I1202 19:49:00.817687 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92108kgnb_d557837e-335a-4da0-b7aa-e8d0a516eada/extract/0.log"
Dec 02 19:49:00 crc kubenswrapper[4792]: I1202 19:49:00.834615 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92108kgnb_d557837e-335a-4da0-b7aa-e8d0a516eada/pull/0.log"
Dec 02 19:49:00 crc kubenswrapper[4792]: I1202 19:49:00.953662 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832dqg7_5bdd4894-7306-472f-ae3d-2d1a55966015/util/0.log"
Dec 02 19:49:01 crc kubenswrapper[4792]: I1202 19:49:01.044379 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832dqg7_5bdd4894-7306-472f-ae3d-2d1a55966015/util/0.log"
Dec 02 19:49:01 crc kubenswrapper[4792]: I1202 19:49:01.068738 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832dqg7_5bdd4894-7306-472f-ae3d-2d1a55966015/pull/0.log"
Dec 02 19:49:01 crc kubenswrapper[4792]: I1202 19:49:01.088894 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832dqg7_5bdd4894-7306-472f-ae3d-2d1a55966015/pull/0.log"
Dec 02 19:49:01 crc kubenswrapper[4792]: I1202 19:49:01.278881 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832dqg7_5bdd4894-7306-472f-ae3d-2d1a55966015/util/0.log"
Dec 02 19:49:01 crc kubenswrapper[4792]: I1202 19:49:01.284628 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832dqg7_5bdd4894-7306-472f-ae3d-2d1a55966015/extract/0.log"
Dec 02 19:49:01 crc kubenswrapper[4792]: I1202 19:49:01.295937 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f832dqg7_5bdd4894-7306-472f-ae3d-2d1a55966015/pull/0.log"
Dec 02 19:49:01 crc kubenswrapper[4792]: I1202 19:49:01.342849 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-p7bwt_72d09f2a-dc85-44d7-bd86-7248dcedd68b/extract-utilities/0.log"
Dec 02 19:49:01 crc kubenswrapper[4792]: I1202 19:49:01.467631 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-p7bwt_72d09f2a-dc85-44d7-bd86-7248dcedd68b/extract-utilities/0.log"
Dec 02 19:49:01 crc kubenswrapper[4792]: I1202 19:49:01.483724 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-p7bwt_72d09f2a-dc85-44d7-bd86-7248dcedd68b/extract-content/0.log"
Dec 02 19:49:01 crc kubenswrapper[4792]: I1202 19:49:01.506904 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-p7bwt_72d09f2a-dc85-44d7-bd86-7248dcedd68b/extract-content/0.log"
Dec 02 19:49:01 crc kubenswrapper[4792]: I1202 19:49:01.759786 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-p7bwt_72d09f2a-dc85-44d7-bd86-7248dcedd68b/extract-utilities/0.log"
Dec 02 19:49:01 crc kubenswrapper[4792]: I1202 19:49:01.823705 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-p7bwt_72d09f2a-dc85-44d7-bd86-7248dcedd68b/extract-content/0.log"
Dec 02 19:49:01 crc kubenswrapper[4792]: I1202 19:49:01.830102 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-ch67n_ace09f1b-8dfe-48ae-93b6-b6a6f32beb8b/extract-utilities/0.log"
Dec 02 19:49:02 crc kubenswrapper[4792]: I1202 19:49:02.022679 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-ch67n_ace09f1b-8dfe-48ae-93b6-b6a6f32beb8b/extract-utilities/0.log"
Dec 02 19:49:02 crc kubenswrapper[4792]: I1202 19:49:02.061222 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-ch67n_ace09f1b-8dfe-48ae-93b6-b6a6f32beb8b/extract-content/0.log"
Dec 02 19:49:02 crc kubenswrapper[4792]: I1202 19:49:02.113897 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-ch67n_ace09f1b-8dfe-48ae-93b6-b6a6f32beb8b/extract-content/0.log"
Dec 02 19:49:02 crc kubenswrapper[4792]: I1202 19:49:02.279240 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-p7bwt_72d09f2a-dc85-44d7-bd86-7248dcedd68b/registry-server/0.log"
Dec 02 19:49:02 crc kubenswrapper[4792]: I1202 19:49:02.456880 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-ch67n_ace09f1b-8dfe-48ae-93b6-b6a6f32beb8b/extract-utilities/0.log"
Dec 02 19:49:02 crc kubenswrapper[4792]: I1202 19:49:02.502474 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-7r9rs_1ed6fdc1-3384-4b3a-92a3-fe3c5a50c21e/marketplace-operator/0.log"
Dec 02 19:49:02 crc kubenswrapper[4792]: I1202 19:49:02.503554 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-ch67n_ace09f1b-8dfe-48ae-93b6-b6a6f32beb8b/extract-content/0.log"
Dec 02 19:49:02 crc kubenswrapper[4792]: I1202 19:49:02.779015 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gtsnz_2f57faa3-e0a7-4d6a-b404-0e0d2fb73461/extract-utilities/0.log"
Dec 02 19:49:02 crc kubenswrapper[4792]: I1202 19:49:02.959575 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gtsnz_2f57faa3-e0a7-4d6a-b404-0e0d2fb73461/extract-content/0.log"
Dec 02 19:49:02 crc kubenswrapper[4792]: I1202 19:49:02.994680 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gtsnz_2f57faa3-e0a7-4d6a-b404-0e0d2fb73461/extract-utilities/0.log"
Dec 02 19:49:03 crc kubenswrapper[4792]: I1202 19:49:03.010793 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gtsnz_2f57faa3-e0a7-4d6a-b404-0e0d2fb73461/extract-content/0.log"
Dec 02 19:49:03 crc kubenswrapper[4792]: I1202 19:49:03.096484 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-ch67n_ace09f1b-8dfe-48ae-93b6-b6a6f32beb8b/registry-server/0.log"
Dec 02 19:49:03 crc kubenswrapper[4792]: I1202 19:49:03.156153 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gtsnz_2f57faa3-e0a7-4d6a-b404-0e0d2fb73461/extract-content/0.log"
Dec 02 19:49:03 crc kubenswrapper[4792]: I1202 19:49:03.184079 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gtsnz_2f57faa3-e0a7-4d6a-b404-0e0d2fb73461/extract-utilities/0.log"
Dec 02 19:49:03 crc kubenswrapper[4792]: I1202 19:49:03.327644 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2wlh2_e56bb4a5-5196-4a99-8d4d-2c8449675e62/extract-utilities/0.log"
Dec 02 19:49:03 crc kubenswrapper[4792]: I1202 19:49:03.350122 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gtsnz_2f57faa3-e0a7-4d6a-b404-0e0d2fb73461/registry-server/0.log"
Dec 02 19:49:03 crc kubenswrapper[4792]: I1202 19:49:03.532683 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2wlh2_e56bb4a5-5196-4a99-8d4d-2c8449675e62/extract-utilities/0.log"
Dec 02 19:49:03 crc kubenswrapper[4792]: I1202 19:49:03.549175 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2wlh2_e56bb4a5-5196-4a99-8d4d-2c8449675e62/extract-content/0.log"
Dec 02 19:49:03 crc kubenswrapper[4792]: I1202 19:49:03.583451 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2wlh2_e56bb4a5-5196-4a99-8d4d-2c8449675e62/extract-content/0.log"
Dec 02 19:49:03 crc kubenswrapper[4792]: I1202 19:49:03.702587 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2wlh2_e56bb4a5-5196-4a99-8d4d-2c8449675e62/extract-utilities/0.log"
Dec 02 19:49:03 crc kubenswrapper[4792]: I1202 19:49:03.744465 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2wlh2_e56bb4a5-5196-4a99-8d4d-2c8449675e62/extract-content/0.log"
Dec 02 19:49:04 crc kubenswrapper[4792]: I1202 19:49:04.128357 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2wlh2_e56bb4a5-5196-4a99-8d4d-2c8449675e62/registry-server/0.log"
Dec 02 19:49:05 crc kubenswrapper[4792]: I1202 19:49:05.540222 4792 scope.go:117] "RemoveContainer" containerID="c44aa25eeed6cd154dd69fd2371d893756cf698611eb58116f99b003da1f4624"
Dec 02 19:49:05 crc kubenswrapper[4792]: E1202 19:49:05.540643 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f"
Dec 02 19:49:10 crc kubenswrapper[4792]: I1202 19:49:10.479247 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-kk89t"]
Dec 02 19:49:10 crc kubenswrapper[4792]: E1202 19:49:10.480359 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b43b869-0683-4a3d-a063-8746fa22fe85" containerName="extract-content"
Dec 02 19:49:10 crc kubenswrapper[4792]: I1202 19:49:10.480376 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b43b869-0683-4a3d-a063-8746fa22fe85" containerName="extract-content"
Dec 02 19:49:10 crc kubenswrapper[4792]: E1202 19:49:10.480400 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b43b869-0683-4a3d-a063-8746fa22fe85" containerName="extract-utilities"
Dec 02 19:49:10 crc kubenswrapper[4792]: I1202 19:49:10.480408 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b43b869-0683-4a3d-a063-8746fa22fe85" containerName="extract-utilities"
Dec 02 19:49:10 crc kubenswrapper[4792]: E1202 19:49:10.480445 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b43b869-0683-4a3d-a063-8746fa22fe85" containerName="registry-server"
Dec 02 19:49:10 crc kubenswrapper[4792]: I1202 19:49:10.480453 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b43b869-0683-4a3d-a063-8746fa22fe85" containerName="registry-server"
Dec 02 19:49:10 crc kubenswrapper[4792]: I1202 19:49:10.480728 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b43b869-0683-4a3d-a063-8746fa22fe85" containerName="registry-server"
Dec 02 19:49:10 crc kubenswrapper[4792]: I1202 19:49:10.482745 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kk89t"
Dec 02 19:49:10 crc kubenswrapper[4792]: I1202 19:49:10.510072 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-kk89t"]
Dec 02 19:49:10 crc kubenswrapper[4792]: I1202 19:49:10.613557 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e28769be-8de5-49cd-94cc-4f5c35ddc1e6-catalog-content\") pod \"certified-operators-kk89t\" (UID: \"e28769be-8de5-49cd-94cc-4f5c35ddc1e6\") " pod="openshift-marketplace/certified-operators-kk89t"
Dec 02 19:49:10 crc kubenswrapper[4792]: I1202 19:49:10.613834 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e28769be-8de5-49cd-94cc-4f5c35ddc1e6-utilities\") pod \"certified-operators-kk89t\" (UID: \"e28769be-8de5-49cd-94cc-4f5c35ddc1e6\") " pod="openshift-marketplace/certified-operators-kk89t"
Dec 02 19:49:10 crc kubenswrapper[4792]: I1202 19:49:10.613888 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xzjrf\" (UniqueName: \"kubernetes.io/projected/e28769be-8de5-49cd-94cc-4f5c35ddc1e6-kube-api-access-xzjrf\") pod \"certified-operators-kk89t\" (UID: \"e28769be-8de5-49cd-94cc-4f5c35ddc1e6\") " pod="openshift-marketplace/certified-operators-kk89t"
Dec 02 19:49:10 crc kubenswrapper[4792]: I1202 19:49:10.715836 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xzjrf\" (UniqueName: \"kubernetes.io/projected/e28769be-8de5-49cd-94cc-4f5c35ddc1e6-kube-api-access-xzjrf\") pod \"certified-operators-kk89t\" (UID: \"e28769be-8de5-49cd-94cc-4f5c35ddc1e6\") " pod="openshift-marketplace/certified-operators-kk89t"
Dec 02 19:49:10 crc kubenswrapper[4792]: I1202 19:49:10.715934 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e28769be-8de5-49cd-94cc-4f5c35ddc1e6-catalog-content\") pod \"certified-operators-kk89t\" (UID: \"e28769be-8de5-49cd-94cc-4f5c35ddc1e6\") " pod="openshift-marketplace/certified-operators-kk89t"
Dec 02 19:49:10 crc kubenswrapper[4792]: I1202 19:49:10.716251 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e28769be-8de5-49cd-94cc-4f5c35ddc1e6-utilities\") pod \"certified-operators-kk89t\" (UID: \"e28769be-8de5-49cd-94cc-4f5c35ddc1e6\") " pod="openshift-marketplace/certified-operators-kk89t"
Dec 02 19:49:10 crc kubenswrapper[4792]: I1202 19:49:10.716424 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e28769be-8de5-49cd-94cc-4f5c35ddc1e6-catalog-content\") pod \"certified-operators-kk89t\" (UID: \"e28769be-8de5-49cd-94cc-4f5c35ddc1e6\") " pod="openshift-marketplace/certified-operators-kk89t"
Dec 02 19:49:10 crc kubenswrapper[4792]: I1202 19:49:10.716735 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e28769be-8de5-49cd-94cc-4f5c35ddc1e6-utilities\") pod \"certified-operators-kk89t\" (UID: \"e28769be-8de5-49cd-94cc-4f5c35ddc1e6\") " pod="openshift-marketplace/certified-operators-kk89t"
Dec 02 19:49:10 crc kubenswrapper[4792]: I1202 19:49:10.743068 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xzjrf\" (UniqueName: \"kubernetes.io/projected/e28769be-8de5-49cd-94cc-4f5c35ddc1e6-kube-api-access-xzjrf\") pod \"certified-operators-kk89t\" (UID: \"e28769be-8de5-49cd-94cc-4f5c35ddc1e6\") " pod="openshift-marketplace/certified-operators-kk89t"
Dec 02 19:49:10 crc kubenswrapper[4792]: I1202 19:49:10.809091 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kk89t"
Dec 02 19:49:11 crc kubenswrapper[4792]: I1202 19:49:11.331567 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-kk89t"]
Dec 02 19:49:12 crc kubenswrapper[4792]: I1202 19:49:12.176954 4792 generic.go:334] "Generic (PLEG): container finished" podID="e28769be-8de5-49cd-94cc-4f5c35ddc1e6" containerID="2d8e8edc69b4a5912f512f2caba28d570ddb0c66e4f39896f594fb483fa65d85" exitCode=0
Dec 02 19:49:12 crc kubenswrapper[4792]: I1202 19:49:12.177183 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kk89t" event={"ID":"e28769be-8de5-49cd-94cc-4f5c35ddc1e6","Type":"ContainerDied","Data":"2d8e8edc69b4a5912f512f2caba28d570ddb0c66e4f39896f594fb483fa65d85"}
Dec 02 19:49:12 crc kubenswrapper[4792]: I1202 19:49:12.178822 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kk89t" event={"ID":"e28769be-8de5-49cd-94cc-4f5c35ddc1e6","Type":"ContainerStarted","Data":"227d8d34c21bccb8646f8d2ec8e3ac2a9c34f3bf3b4fe9a132b9e5ed4e5d4e2b"}
Dec 02 19:49:13 crc kubenswrapper[4792]: I1202 19:49:13.187600 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kk89t" event={"ID":"e28769be-8de5-49cd-94cc-4f5c35ddc1e6","Type":"ContainerStarted","Data":"1c8190105c19727615349649775a67dab9beebf7459e37c6afcbfd8e6bd3c75d"}
Dec 02 19:49:14 crc kubenswrapper[4792]: I1202 19:49:14.216003 4792 generic.go:334] "Generic (PLEG): container finished" podID="e28769be-8de5-49cd-94cc-4f5c35ddc1e6" containerID="1c8190105c19727615349649775a67dab9beebf7459e37c6afcbfd8e6bd3c75d" exitCode=0
Dec 02 19:49:14 crc kubenswrapper[4792]: I1202 19:49:14.216073 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kk89t" event={"ID":"e28769be-8de5-49cd-94cc-4f5c35ddc1e6","Type":"ContainerDied","Data":"1c8190105c19727615349649775a67dab9beebf7459e37c6afcbfd8e6bd3c75d"}
Dec 02 19:49:15 crc kubenswrapper[4792]: I1202 19:49:15.227397 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kk89t" event={"ID":"e28769be-8de5-49cd-94cc-4f5c35ddc1e6","Type":"ContainerStarted","Data":"89ce97962d9198e8803efde75d7cf563c265a7c81aad8b41608c57714787fcfc"}
Dec 02 19:49:15 crc kubenswrapper[4792]: I1202 19:49:15.249363 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-kk89t" podStartSLOduration=2.709741509 podStartE2EDuration="5.249347464s" podCreationTimestamp="2025-12-02 19:49:10 +0000 UTC" firstStartedPulling="2025-12-02 19:49:12.179475293 +0000 UTC m=+4382.952367631" lastFinishedPulling="2025-12-02 19:49:14.719081218 +0000 UTC m=+4385.491973586" observedRunningTime="2025-12-02 19:49:15.246800147 +0000 UTC m=+4386.019692475" watchObservedRunningTime="2025-12-02 19:49:15.249347464 +0000 UTC m=+4386.022239792"
Dec 02 19:49:16 crc kubenswrapper[4792]: I1202 19:49:16.540417 4792 scope.go:117] "RemoveContainer" containerID="c44aa25eeed6cd154dd69fd2371d893756cf698611eb58116f99b003da1f4624"
Dec 02 19:49:16 crc kubenswrapper[4792]: E1202 19:49:16.541458 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f"
Dec 02 19:49:20 crc kubenswrapper[4792]: I1202 19:49:20.809818 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-kk89t"
Dec 02 19:49:20 crc kubenswrapper[4792]: I1202 19:49:20.810281 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-kk89t"
Dec 02 19:49:20 crc kubenswrapper[4792]: I1202 19:49:20.865995 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-kk89t"
Dec 02 19:49:21 crc kubenswrapper[4792]: I1202 19:49:21.159462 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-668cf9dfbb-5vh6b_9b8eb6d9-8320-401e-8092-5333c1772c4e/prometheus-operator/0.log"
Dec 02 19:49:21 crc kubenswrapper[4792]: I1202 19:49:21.299703 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-c7c77bb88-c2k8d_d4a8cfbd-83cc-47ee-abe2-f48802bb58e4/prometheus-operator-admission-webhook/0.log"
Dec 02 19:49:21 crc kubenswrapper[4792]: I1202 19:49:21.371026 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-c7c77bb88-f6nm9_aeed3219-8084-40fd-888a-1e4bc4dd3179/prometheus-operator-admission-webhook/0.log"
Dec 02 19:49:21 crc kubenswrapper[4792]: I1202 19:49:21.399589 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-kk89t"
Dec 02 19:49:21 crc kubenswrapper[4792]: I1202 19:49:21.449622 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-kk89t"]
Dec 02 19:49:21 crc kubenswrapper[4792]: I1202 19:49:21.482042 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-d8bb48f5d-jg8gh_eba5ee28-f55c-4e22-b4f7-22899eb4fdb7/operator/0.log"
Dec 02 19:49:21 crc kubenswrapper[4792]: I1202 19:49:21.548622 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5446b9c989-nqbpz_d953db38-bd34-4d90-9c21-64ed4b3feaaf/perses-operator/0.log"
Dec 02 19:49:23 crc kubenswrapper[4792]: I1202 19:49:23.366484 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-kk89t" podUID="e28769be-8de5-49cd-94cc-4f5c35ddc1e6" containerName="registry-server" containerID="cri-o://89ce97962d9198e8803efde75d7cf563c265a7c81aad8b41608c57714787fcfc" gracePeriod=2
Dec 02 19:49:23 crc kubenswrapper[4792]: I1202 19:49:23.910026 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kk89t"
Dec 02 19:49:24 crc kubenswrapper[4792]: I1202 19:49:24.011732 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e28769be-8de5-49cd-94cc-4f5c35ddc1e6-utilities\") pod \"e28769be-8de5-49cd-94cc-4f5c35ddc1e6\" (UID: \"e28769be-8de5-49cd-94cc-4f5c35ddc1e6\") "
Dec 02 19:49:24 crc kubenswrapper[4792]: I1202 19:49:24.012046 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e28769be-8de5-49cd-94cc-4f5c35ddc1e6-catalog-content\") pod \"e28769be-8de5-49cd-94cc-4f5c35ddc1e6\" (UID: \"e28769be-8de5-49cd-94cc-4f5c35ddc1e6\") "
Dec 02 19:49:24 crc kubenswrapper[4792]: I1202 19:49:24.012087 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xzjrf\" (UniqueName: \"kubernetes.io/projected/e28769be-8de5-49cd-94cc-4f5c35ddc1e6-kube-api-access-xzjrf\") pod \"e28769be-8de5-49cd-94cc-4f5c35ddc1e6\" (UID: \"e28769be-8de5-49cd-94cc-4f5c35ddc1e6\") "
Dec 02 19:49:24 crc kubenswrapper[4792]: I1202 19:49:24.012767 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e28769be-8de5-49cd-94cc-4f5c35ddc1e6-utilities" (OuterVolumeSpecName: "utilities") pod "e28769be-8de5-49cd-94cc-4f5c35ddc1e6" (UID: "e28769be-8de5-49cd-94cc-4f5c35ddc1e6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 19:49:24 crc kubenswrapper[4792]: I1202 19:49:24.018997 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e28769be-8de5-49cd-94cc-4f5c35ddc1e6-kube-api-access-xzjrf" (OuterVolumeSpecName: "kube-api-access-xzjrf") pod "e28769be-8de5-49cd-94cc-4f5c35ddc1e6" (UID: "e28769be-8de5-49cd-94cc-4f5c35ddc1e6"). InnerVolumeSpecName "kube-api-access-xzjrf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 19:49:24 crc kubenswrapper[4792]: I1202 19:49:24.060658 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e28769be-8de5-49cd-94cc-4f5c35ddc1e6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e28769be-8de5-49cd-94cc-4f5c35ddc1e6" (UID: "e28769be-8de5-49cd-94cc-4f5c35ddc1e6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 19:49:24 crc kubenswrapper[4792]: I1202 19:49:24.114566 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e28769be-8de5-49cd-94cc-4f5c35ddc1e6-utilities\") on node \"crc\" DevicePath \"\""
Dec 02 19:49:24 crc kubenswrapper[4792]: I1202 19:49:24.114604 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e28769be-8de5-49cd-94cc-4f5c35ddc1e6-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 02 19:49:24 crc kubenswrapper[4792]: I1202 19:49:24.114618 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xzjrf\" (UniqueName: \"kubernetes.io/projected/e28769be-8de5-49cd-94cc-4f5c35ddc1e6-kube-api-access-xzjrf\") on node \"crc\" DevicePath \"\""
Dec 02 19:49:24 crc kubenswrapper[4792]: I1202 19:49:24.386543 4792 generic.go:334] "Generic (PLEG): container finished" podID="e28769be-8de5-49cd-94cc-4f5c35ddc1e6" containerID="89ce97962d9198e8803efde75d7cf563c265a7c81aad8b41608c57714787fcfc" exitCode=0
Dec 02 19:49:24 crc kubenswrapper[4792]: I1202 19:49:24.386592 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kk89t" event={"ID":"e28769be-8de5-49cd-94cc-4f5c35ddc1e6","Type":"ContainerDied","Data":"89ce97962d9198e8803efde75d7cf563c265a7c81aad8b41608c57714787fcfc"}
Dec 02 19:49:24 crc kubenswrapper[4792]: I1202 19:49:24.386624 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kk89t" event={"ID":"e28769be-8de5-49cd-94cc-4f5c35ddc1e6","Type":"ContainerDied","Data":"227d8d34c21bccb8646f8d2ec8e3ac2a9c34f3bf3b4fe9a132b9e5ed4e5d4e2b"}
Dec 02 19:49:24 crc kubenswrapper[4792]: I1202 19:49:24.386643 4792 scope.go:117] "RemoveContainer" containerID="89ce97962d9198e8803efde75d7cf563c265a7c81aad8b41608c57714787fcfc"
Dec 02 19:49:24 crc kubenswrapper[4792]: I1202 19:49:24.386790 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-kk89t" Dec 02 19:49:24 crc kubenswrapper[4792]: I1202 19:49:24.422512 4792 scope.go:117] "RemoveContainer" containerID="1c8190105c19727615349649775a67dab9beebf7459e37c6afcbfd8e6bd3c75d" Dec 02 19:49:24 crc kubenswrapper[4792]: I1202 19:49:24.429410 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-kk89t"] Dec 02 19:49:24 crc kubenswrapper[4792]: I1202 19:49:24.445180 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-kk89t"] Dec 02 19:49:24 crc kubenswrapper[4792]: I1202 19:49:24.449035 4792 scope.go:117] "RemoveContainer" containerID="2d8e8edc69b4a5912f512f2caba28d570ddb0c66e4f39896f594fb483fa65d85" Dec 02 19:49:24 crc kubenswrapper[4792]: I1202 19:49:24.497958 4792 scope.go:117] "RemoveContainer" containerID="89ce97962d9198e8803efde75d7cf563c265a7c81aad8b41608c57714787fcfc" Dec 02 19:49:24 crc kubenswrapper[4792]: E1202 19:49:24.498490 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"89ce97962d9198e8803efde75d7cf563c265a7c81aad8b41608c57714787fcfc\": container with ID starting with 89ce97962d9198e8803efde75d7cf563c265a7c81aad8b41608c57714787fcfc not found: ID does not exist" containerID="89ce97962d9198e8803efde75d7cf563c265a7c81aad8b41608c57714787fcfc" Dec 02 19:49:24 crc kubenswrapper[4792]: I1202 19:49:24.498547 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89ce97962d9198e8803efde75d7cf563c265a7c81aad8b41608c57714787fcfc"} err="failed to get container status \"89ce97962d9198e8803efde75d7cf563c265a7c81aad8b41608c57714787fcfc\": rpc error: code = NotFound desc = could not find container \"89ce97962d9198e8803efde75d7cf563c265a7c81aad8b41608c57714787fcfc\": container with ID starting with 89ce97962d9198e8803efde75d7cf563c265a7c81aad8b41608c57714787fcfc not found: ID does not exist" Dec 02 19:49:24 crc kubenswrapper[4792]: I1202 19:49:24.498575 4792 scope.go:117] "RemoveContainer" containerID="1c8190105c19727615349649775a67dab9beebf7459e37c6afcbfd8e6bd3c75d" Dec 02 19:49:24 crc kubenswrapper[4792]: E1202 19:49:24.499026 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c8190105c19727615349649775a67dab9beebf7459e37c6afcbfd8e6bd3c75d\": container with ID starting with 1c8190105c19727615349649775a67dab9beebf7459e37c6afcbfd8e6bd3c75d not found: ID does not exist" containerID="1c8190105c19727615349649775a67dab9beebf7459e37c6afcbfd8e6bd3c75d" Dec 02 19:49:24 crc kubenswrapper[4792]: I1202 19:49:24.499082 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c8190105c19727615349649775a67dab9beebf7459e37c6afcbfd8e6bd3c75d"} err="failed to get container status \"1c8190105c19727615349649775a67dab9beebf7459e37c6afcbfd8e6bd3c75d\": rpc error: code = NotFound desc = could not find container \"1c8190105c19727615349649775a67dab9beebf7459e37c6afcbfd8e6bd3c75d\": container with ID starting with 1c8190105c19727615349649775a67dab9beebf7459e37c6afcbfd8e6bd3c75d not found: ID does not exist" Dec 02 19:49:24 crc kubenswrapper[4792]: I1202 19:49:24.499117 4792 scope.go:117] "RemoveContainer" containerID="2d8e8edc69b4a5912f512f2caba28d570ddb0c66e4f39896f594fb483fa65d85" Dec 02 19:49:24 crc kubenswrapper[4792]: E1202 19:49:24.499440 4792 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"2d8e8edc69b4a5912f512f2caba28d570ddb0c66e4f39896f594fb483fa65d85\": container with ID starting with 2d8e8edc69b4a5912f512f2caba28d570ddb0c66e4f39896f594fb483fa65d85 not found: ID does not exist" containerID="2d8e8edc69b4a5912f512f2caba28d570ddb0c66e4f39896f594fb483fa65d85" Dec 02 19:49:24 crc kubenswrapper[4792]: I1202 19:49:24.499480 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d8e8edc69b4a5912f512f2caba28d570ddb0c66e4f39896f594fb483fa65d85"} err="failed to get container status \"2d8e8edc69b4a5912f512f2caba28d570ddb0c66e4f39896f594fb483fa65d85\": rpc error: code = NotFound desc = could not find container \"2d8e8edc69b4a5912f512f2caba28d570ddb0c66e4f39896f594fb483fa65d85\": container with ID starting with 2d8e8edc69b4a5912f512f2caba28d570ddb0c66e4f39896f594fb483fa65d85 not found: ID does not exist" Dec 02 19:49:25 crc kubenswrapper[4792]: I1202 19:49:25.553081 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e28769be-8de5-49cd-94cc-4f5c35ddc1e6" path="/var/lib/kubelet/pods/e28769be-8de5-49cd-94cc-4f5c35ddc1e6/volumes" Dec 02 19:49:31 crc kubenswrapper[4792]: I1202 19:49:31.539754 4792 scope.go:117] "RemoveContainer" containerID="c44aa25eeed6cd154dd69fd2371d893756cf698611eb58116f99b003da1f4624" Dec 02 19:49:31 crc kubenswrapper[4792]: E1202 19:49:31.540620 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:49:36 crc kubenswrapper[4792]: I1202 19:49:36.769986 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-7fd7bc68c8-q7j7s_0ea8c233-1ba5-435d-a7e8-93d9d055fe7b/kube-rbac-proxy/0.log" Dec 02 19:49:36 crc kubenswrapper[4792]: I1202 19:49:36.790766 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-7fd7bc68c8-q7j7s_0ea8c233-1ba5-435d-a7e8-93d9d055fe7b/manager/0.log" Dec 02 19:49:42 crc kubenswrapper[4792]: I1202 19:49:42.541691 4792 scope.go:117] "RemoveContainer" containerID="c44aa25eeed6cd154dd69fd2371d893756cf698611eb58116f99b003da1f4624" Dec 02 19:49:42 crc kubenswrapper[4792]: E1202 19:49:42.542651 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:49:55 crc kubenswrapper[4792]: I1202 19:49:55.540451 4792 scope.go:117] "RemoveContainer" containerID="c44aa25eeed6cd154dd69fd2371d893756cf698611eb58116f99b003da1f4624" Dec 02 19:49:55 crc kubenswrapper[4792]: E1202 19:49:55.541281 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:50:07 crc kubenswrapper[4792]: I1202 19:50:07.540204 4792 scope.go:117] "RemoveContainer" containerID="c44aa25eeed6cd154dd69fd2371d893756cf698611eb58116f99b003da1f4624" Dec 02 19:50:07 crc kubenswrapper[4792]: E1202 19:50:07.543703 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:50:19 crc kubenswrapper[4792]: I1202 19:50:19.561884 4792 scope.go:117] "RemoveContainer" containerID="c44aa25eeed6cd154dd69fd2371d893756cf698611eb58116f99b003da1f4624" Dec 02 19:50:19 crc kubenswrapper[4792]: E1202 19:50:19.563465 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:50:30 crc kubenswrapper[4792]: I1202 19:50:30.539226 4792 scope.go:117] "RemoveContainer" containerID="c44aa25eeed6cd154dd69fd2371d893756cf698611eb58116f99b003da1f4624" Dec 02 19:50:30 crc kubenswrapper[4792]: E1202 19:50:30.540041 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:50:41 crc kubenswrapper[4792]: I1202 19:50:41.540913 4792 scope.go:117] "RemoveContainer" containerID="c44aa25eeed6cd154dd69fd2371d893756cf698611eb58116f99b003da1f4624" Dec 02 19:50:41 crc kubenswrapper[4792]: E1202 19:50:41.542283 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:50:52 crc kubenswrapper[4792]: I1202 19:50:52.541764 4792 scope.go:117] "RemoveContainer" containerID="c44aa25eeed6cd154dd69fd2371d893756cf698611eb58116f99b003da1f4624" Dec 02 19:50:52 crc kubenswrapper[4792]: E1202 19:50:52.542750 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" 
podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:51:04 crc kubenswrapper[4792]: I1202 19:51:04.542029 4792 scope.go:117] "RemoveContainer" containerID="c44aa25eeed6cd154dd69fd2371d893756cf698611eb58116f99b003da1f4624" Dec 02 19:51:04 crc kubenswrapper[4792]: E1202 19:51:04.543672 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:51:15 crc kubenswrapper[4792]: I1202 19:51:15.540203 4792 scope.go:117] "RemoveContainer" containerID="c44aa25eeed6cd154dd69fd2371d893756cf698611eb58116f99b003da1f4624" Dec 02 19:51:15 crc kubenswrapper[4792]: E1202 19:51:15.541297 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:51:17 crc kubenswrapper[4792]: I1202 19:51:17.690860 4792 generic.go:334] "Generic (PLEG): container finished" podID="1465f5ca-7bc8-4c1a-b901-1311b06825f5" containerID="9f309923943fe77228437026c9608e15d4c789ef280ac860417e884b641a6ee4" exitCode=0 Dec 02 19:51:17 crc kubenswrapper[4792]: I1202 19:51:17.691194 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-kvrfw/must-gather-42htg" event={"ID":"1465f5ca-7bc8-4c1a-b901-1311b06825f5","Type":"ContainerDied","Data":"9f309923943fe77228437026c9608e15d4c789ef280ac860417e884b641a6ee4"} Dec 02 19:51:17 crc kubenswrapper[4792]: I1202 19:51:17.691992 4792 scope.go:117] "RemoveContainer" containerID="9f309923943fe77228437026c9608e15d4c789ef280ac860417e884b641a6ee4" Dec 02 19:51:17 crc kubenswrapper[4792]: I1202 19:51:17.999616 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-kvrfw_must-gather-42htg_1465f5ca-7bc8-4c1a-b901-1311b06825f5/gather/0.log" Dec 02 19:51:28 crc kubenswrapper[4792]: I1202 19:51:28.418789 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-kvrfw/must-gather-42htg"] Dec 02 19:51:28 crc kubenswrapper[4792]: I1202 19:51:28.419717 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-kvrfw/must-gather-42htg" podUID="1465f5ca-7bc8-4c1a-b901-1311b06825f5" containerName="copy" containerID="cri-o://a2786ad5ea311e9508d52ed8ac75af87649020cab77560afcb687bec2e57b316" gracePeriod=2 Dec 02 19:51:28 crc kubenswrapper[4792]: I1202 19:51:28.431722 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-kvrfw/must-gather-42htg"] Dec 02 19:51:28 crc kubenswrapper[4792]: I1202 19:51:28.816782 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-kvrfw_must-gather-42htg_1465f5ca-7bc8-4c1a-b901-1311b06825f5/copy/0.log" Dec 02 19:51:28 crc kubenswrapper[4792]: I1202 19:51:28.817450 4792 generic.go:334] "Generic (PLEG): container finished" podID="1465f5ca-7bc8-4c1a-b901-1311b06825f5" containerID="a2786ad5ea311e9508d52ed8ac75af87649020cab77560afcb687bec2e57b316" exitCode=143 Dec 02 
19:51:28 crc kubenswrapper[4792]: I1202 19:51:28.965981 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-kvrfw_must-gather-42htg_1465f5ca-7bc8-4c1a-b901-1311b06825f5/copy/0.log" Dec 02 19:51:28 crc kubenswrapper[4792]: I1202 19:51:28.966354 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-kvrfw/must-gather-42htg" Dec 02 19:51:29 crc kubenswrapper[4792]: I1202 19:51:29.059075 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lmjhn\" (UniqueName: \"kubernetes.io/projected/1465f5ca-7bc8-4c1a-b901-1311b06825f5-kube-api-access-lmjhn\") pod \"1465f5ca-7bc8-4c1a-b901-1311b06825f5\" (UID: \"1465f5ca-7bc8-4c1a-b901-1311b06825f5\") " Dec 02 19:51:29 crc kubenswrapper[4792]: I1202 19:51:29.059170 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/1465f5ca-7bc8-4c1a-b901-1311b06825f5-must-gather-output\") pod \"1465f5ca-7bc8-4c1a-b901-1311b06825f5\" (UID: \"1465f5ca-7bc8-4c1a-b901-1311b06825f5\") " Dec 02 19:51:29 crc kubenswrapper[4792]: I1202 19:51:29.066408 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1465f5ca-7bc8-4c1a-b901-1311b06825f5-kube-api-access-lmjhn" (OuterVolumeSpecName: "kube-api-access-lmjhn") pod "1465f5ca-7bc8-4c1a-b901-1311b06825f5" (UID: "1465f5ca-7bc8-4c1a-b901-1311b06825f5"). InnerVolumeSpecName "kube-api-access-lmjhn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:51:29 crc kubenswrapper[4792]: I1202 19:51:29.161771 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lmjhn\" (UniqueName: \"kubernetes.io/projected/1465f5ca-7bc8-4c1a-b901-1311b06825f5-kube-api-access-lmjhn\") on node \"crc\" DevicePath \"\"" Dec 02 19:51:29 crc kubenswrapper[4792]: I1202 19:51:29.237445 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1465f5ca-7bc8-4c1a-b901-1311b06825f5-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "1465f5ca-7bc8-4c1a-b901-1311b06825f5" (UID: "1465f5ca-7bc8-4c1a-b901-1311b06825f5"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:51:29 crc kubenswrapper[4792]: I1202 19:51:29.264177 4792 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/1465f5ca-7bc8-4c1a-b901-1311b06825f5-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 02 19:51:29 crc kubenswrapper[4792]: I1202 19:51:29.553488 4792 scope.go:117] "RemoveContainer" containerID="c44aa25eeed6cd154dd69fd2371d893756cf698611eb58116f99b003da1f4624" Dec 02 19:51:29 crc kubenswrapper[4792]: E1202 19:51:29.554174 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:51:29 crc kubenswrapper[4792]: I1202 19:51:29.556216 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1465f5ca-7bc8-4c1a-b901-1311b06825f5" path="/var/lib/kubelet/pods/1465f5ca-7bc8-4c1a-b901-1311b06825f5/volumes" Dec 02 19:51:29 crc kubenswrapper[4792]: I1202 19:51:29.828800 4792 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-kvrfw_must-gather-42htg_1465f5ca-7bc8-4c1a-b901-1311b06825f5/copy/0.log" Dec 02 19:51:29 crc kubenswrapper[4792]: I1202 19:51:29.829271 4792 scope.go:117] "RemoveContainer" containerID="a2786ad5ea311e9508d52ed8ac75af87649020cab77560afcb687bec2e57b316" Dec 02 19:51:29 crc kubenswrapper[4792]: I1202 19:51:29.829362 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-kvrfw/must-gather-42htg" Dec 02 19:51:29 crc kubenswrapper[4792]: I1202 19:51:29.847881 4792 scope.go:117] "RemoveContainer" containerID="9f309923943fe77228437026c9608e15d4c789ef280ac860417e884b641a6ee4" Dec 02 19:51:41 crc kubenswrapper[4792]: I1202 19:51:41.423352 4792 scope.go:117] "RemoveContainer" containerID="366ab343cae03f6096899271bdd3bf2f2555c5c6cd150f73f1f47119ad3566b0" Dec 02 19:51:41 crc kubenswrapper[4792]: I1202 19:51:41.452163 4792 scope.go:117] "RemoveContainer" containerID="a3d051d39a8754e0f1a7474a4cb4eed43e2554520f94d386267c1f1fb4428b64" Dec 02 19:51:42 crc kubenswrapper[4792]: I1202 19:51:42.542455 4792 scope.go:117] "RemoveContainer" containerID="c44aa25eeed6cd154dd69fd2371d893756cf698611eb58116f99b003da1f4624" Dec 02 19:51:42 crc kubenswrapper[4792]: E1202 19:51:42.543229 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:51:57 crc kubenswrapper[4792]: I1202 19:51:57.540003 4792 scope.go:117] "RemoveContainer" containerID="c44aa25eeed6cd154dd69fd2371d893756cf698611eb58116f99b003da1f4624" Dec 02 19:51:57 crc kubenswrapper[4792]: E1202 19:51:57.541600 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:52:09 crc kubenswrapper[4792]: I1202 19:52:09.554298 4792 scope.go:117] "RemoveContainer" containerID="c44aa25eeed6cd154dd69fd2371d893756cf698611eb58116f99b003da1f4624" Dec 02 19:52:09 crc kubenswrapper[4792]: E1202 19:52:09.555636 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:52:21 crc kubenswrapper[4792]: I1202 19:52:21.581746 4792 scope.go:117] "RemoveContainer" containerID="c44aa25eeed6cd154dd69fd2371d893756cf698611eb58116f99b003da1f4624" Dec 02 19:52:21 crc kubenswrapper[4792]: E1202 19:52:21.582393 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:52:32 crc kubenswrapper[4792]: I1202 19:52:32.539838 4792 scope.go:117] "RemoveContainer" containerID="c44aa25eeed6cd154dd69fd2371d893756cf698611eb58116f99b003da1f4624" Dec 02 19:52:32 crc kubenswrapper[4792]: E1202 19:52:32.540504 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:52:47 crc kubenswrapper[4792]: I1202 19:52:47.540384 4792 scope.go:117] "RemoveContainer" containerID="c44aa25eeed6cd154dd69fd2371d893756cf698611eb58116f99b003da1f4624" Dec 02 19:52:47 crc kubenswrapper[4792]: E1202 19:52:47.541334 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:52:55 crc kubenswrapper[4792]: I1202 19:52:55.421725 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-5vg6k"] Dec 02 19:52:55 crc kubenswrapper[4792]: E1202 19:52:55.423847 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e28769be-8de5-49cd-94cc-4f5c35ddc1e6" containerName="registry-server" Dec 02 19:52:55 crc kubenswrapper[4792]: I1202 19:52:55.423876 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="e28769be-8de5-49cd-94cc-4f5c35ddc1e6" containerName="registry-server" Dec 02 19:52:55 crc kubenswrapper[4792]: E1202 19:52:55.423890 4792 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1465f5ca-7bc8-4c1a-b901-1311b06825f5" containerName="gather" Dec 02 19:52:55 crc kubenswrapper[4792]: I1202 19:52:55.423899 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="1465f5ca-7bc8-4c1a-b901-1311b06825f5" containerName="gather" Dec 02 19:52:55 crc kubenswrapper[4792]: E1202 19:52:55.423942 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1465f5ca-7bc8-4c1a-b901-1311b06825f5" containerName="copy" Dec 02 19:52:55 crc kubenswrapper[4792]: I1202 19:52:55.423951 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="1465f5ca-7bc8-4c1a-b901-1311b06825f5" containerName="copy" Dec 02 19:52:55 crc kubenswrapper[4792]: E1202 19:52:55.423977 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e28769be-8de5-49cd-94cc-4f5c35ddc1e6" containerName="extract-utilities" Dec 02 19:52:55 crc kubenswrapper[4792]: I1202 19:52:55.423986 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="e28769be-8de5-49cd-94cc-4f5c35ddc1e6" containerName="extract-utilities" Dec 02 19:52:55 crc kubenswrapper[4792]: E1202 19:52:55.424000 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e28769be-8de5-49cd-94cc-4f5c35ddc1e6" containerName="extract-content" Dec 02 19:52:55 crc kubenswrapper[4792]: I1202 19:52:55.424008 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="e28769be-8de5-49cd-94cc-4f5c35ddc1e6" containerName="extract-content" Dec 02 19:52:55 crc kubenswrapper[4792]: I1202 19:52:55.424289 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="1465f5ca-7bc8-4c1a-b901-1311b06825f5" containerName="gather" Dec 02 19:52:55 crc kubenswrapper[4792]: I1202 19:52:55.424306 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="1465f5ca-7bc8-4c1a-b901-1311b06825f5" containerName="copy" Dec 02 19:52:55 crc kubenswrapper[4792]: I1202 19:52:55.424318 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="e28769be-8de5-49cd-94cc-4f5c35ddc1e6" containerName="registry-server" Dec 02 19:52:55 crc kubenswrapper[4792]: I1202 19:52:55.426495 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5vg6k" Dec 02 19:52:55 crc kubenswrapper[4792]: I1202 19:52:55.436344 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5vg6k"] Dec 02 19:52:55 crc kubenswrapper[4792]: I1202 19:52:55.567333 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgc9c\" (UniqueName: \"kubernetes.io/projected/67b7043f-a615-4a79-9704-40f367e3c4ce-kube-api-access-rgc9c\") pod \"redhat-marketplace-5vg6k\" (UID: \"67b7043f-a615-4a79-9704-40f367e3c4ce\") " pod="openshift-marketplace/redhat-marketplace-5vg6k" Dec 02 19:52:55 crc kubenswrapper[4792]: I1202 19:52:55.567628 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67b7043f-a615-4a79-9704-40f367e3c4ce-catalog-content\") pod \"redhat-marketplace-5vg6k\" (UID: \"67b7043f-a615-4a79-9704-40f367e3c4ce\") " pod="openshift-marketplace/redhat-marketplace-5vg6k" Dec 02 19:52:55 crc kubenswrapper[4792]: I1202 19:52:55.567948 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67b7043f-a615-4a79-9704-40f367e3c4ce-utilities\") pod \"redhat-marketplace-5vg6k\" (UID: \"67b7043f-a615-4a79-9704-40f367e3c4ce\") " pod="openshift-marketplace/redhat-marketplace-5vg6k" Dec 02 19:52:55 crc kubenswrapper[4792]: I1202 19:52:55.669958 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgc9c\" (UniqueName: \"kubernetes.io/projected/67b7043f-a615-4a79-9704-40f367e3c4ce-kube-api-access-rgc9c\") pod \"redhat-marketplace-5vg6k\" (UID: \"67b7043f-a615-4a79-9704-40f367e3c4ce\") " pod="openshift-marketplace/redhat-marketplace-5vg6k" Dec 02 19:52:55 crc kubenswrapper[4792]: I1202 19:52:55.670076 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67b7043f-a615-4a79-9704-40f367e3c4ce-catalog-content\") pod \"redhat-marketplace-5vg6k\" (UID: \"67b7043f-a615-4a79-9704-40f367e3c4ce\") " pod="openshift-marketplace/redhat-marketplace-5vg6k" Dec 02 19:52:55 crc kubenswrapper[4792]: I1202 19:52:55.670184 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67b7043f-a615-4a79-9704-40f367e3c4ce-utilities\") pod \"redhat-marketplace-5vg6k\" (UID: \"67b7043f-a615-4a79-9704-40f367e3c4ce\") " pod="openshift-marketplace/redhat-marketplace-5vg6k" Dec 02 19:52:55 crc kubenswrapper[4792]: I1202 19:52:55.670967 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67b7043f-a615-4a79-9704-40f367e3c4ce-utilities\") pod \"redhat-marketplace-5vg6k\" (UID: \"67b7043f-a615-4a79-9704-40f367e3c4ce\") " pod="openshift-marketplace/redhat-marketplace-5vg6k" Dec 02 19:52:55 crc kubenswrapper[4792]: I1202 19:52:55.670972 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67b7043f-a615-4a79-9704-40f367e3c4ce-catalog-content\") pod \"redhat-marketplace-5vg6k\" (UID: \"67b7043f-a615-4a79-9704-40f367e3c4ce\") " pod="openshift-marketplace/redhat-marketplace-5vg6k" Dec 02 19:52:55 crc kubenswrapper[4792]: I1202 19:52:55.700859 4792 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-rgc9c\" (UniqueName: \"kubernetes.io/projected/67b7043f-a615-4a79-9704-40f367e3c4ce-kube-api-access-rgc9c\") pod \"redhat-marketplace-5vg6k\" (UID: \"67b7043f-a615-4a79-9704-40f367e3c4ce\") " pod="openshift-marketplace/redhat-marketplace-5vg6k" Dec 02 19:52:55 crc kubenswrapper[4792]: I1202 19:52:55.767093 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5vg6k" Dec 02 19:52:56 crc kubenswrapper[4792]: I1202 19:52:56.348480 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5vg6k"] Dec 02 19:52:56 crc kubenswrapper[4792]: W1202 19:52:56.355611 4792 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod67b7043f_a615_4a79_9704_40f367e3c4ce.slice/crio-666e47d873974ef988206c8e48ca3522b6cb7dbd232d621467e560f8a5762254 WatchSource:0}: Error finding container 666e47d873974ef988206c8e48ca3522b6cb7dbd232d621467e560f8a5762254: Status 404 returned error can't find the container with id 666e47d873974ef988206c8e48ca3522b6cb7dbd232d621467e560f8a5762254 Dec 02 19:52:56 crc kubenswrapper[4792]: I1202 19:52:56.840219 4792 generic.go:334] "Generic (PLEG): container finished" podID="67b7043f-a615-4a79-9704-40f367e3c4ce" containerID="330dd0e48e811cd41fa43a850b3f0758a045075f7d719b0fcf0def9ab8a25a77" exitCode=0 Dec 02 19:52:56 crc kubenswrapper[4792]: I1202 19:52:56.840481 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5vg6k" event={"ID":"67b7043f-a615-4a79-9704-40f367e3c4ce","Type":"ContainerDied","Data":"330dd0e48e811cd41fa43a850b3f0758a045075f7d719b0fcf0def9ab8a25a77"} Dec 02 19:52:56 crc kubenswrapper[4792]: I1202 19:52:56.840505 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5vg6k" event={"ID":"67b7043f-a615-4a79-9704-40f367e3c4ce","Type":"ContainerStarted","Data":"666e47d873974ef988206c8e48ca3522b6cb7dbd232d621467e560f8a5762254"} Dec 02 19:52:58 crc kubenswrapper[4792]: I1202 19:52:58.865044 4792 generic.go:334] "Generic (PLEG): container finished" podID="67b7043f-a615-4a79-9704-40f367e3c4ce" containerID="5b5ec4e0ede1f920b6df71800277628522c6d97f9fbf2ac0cbc6874dd60086a1" exitCode=0 Dec 02 19:52:58 crc kubenswrapper[4792]: I1202 19:52:58.865139 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5vg6k" event={"ID":"67b7043f-a615-4a79-9704-40f367e3c4ce","Type":"ContainerDied","Data":"5b5ec4e0ede1f920b6df71800277628522c6d97f9fbf2ac0cbc6874dd60086a1"} Dec 02 19:52:59 crc kubenswrapper[4792]: I1202 19:52:59.880164 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5vg6k" event={"ID":"67b7043f-a615-4a79-9704-40f367e3c4ce","Type":"ContainerStarted","Data":"75db2b94378842d96b36283e4f4bb8479ca421b4ed6c8c461b8de78e98f857b2"} Dec 02 19:52:59 crc kubenswrapper[4792]: I1202 19:52:59.910139 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-5vg6k" podStartSLOduration=2.37445685 podStartE2EDuration="4.910116482s" podCreationTimestamp="2025-12-02 19:52:55 +0000 UTC" firstStartedPulling="2025-12-02 19:52:56.842961622 +0000 UTC m=+4607.615853990" lastFinishedPulling="2025-12-02 19:52:59.378621284 +0000 UTC m=+4610.151513622" observedRunningTime="2025-12-02 19:52:59.903630692 +0000 UTC m=+4610.676523030" 
watchObservedRunningTime="2025-12-02 19:52:59.910116482 +0000 UTC m=+4610.683008820" Dec 02 19:53:01 crc kubenswrapper[4792]: I1202 19:53:01.540002 4792 scope.go:117] "RemoveContainer" containerID="c44aa25eeed6cd154dd69fd2371d893756cf698611eb58116f99b003da1f4624" Dec 02 19:53:01 crc kubenswrapper[4792]: E1202 19:53:01.541137 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:53:05 crc kubenswrapper[4792]: I1202 19:53:05.767685 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-5vg6k" Dec 02 19:53:05 crc kubenswrapper[4792]: I1202 19:53:05.768444 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-5vg6k" Dec 02 19:53:05 crc kubenswrapper[4792]: I1202 19:53:05.864839 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-5vg6k" Dec 02 19:53:06 crc kubenswrapper[4792]: I1202 19:53:06.018379 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-5vg6k" Dec 02 19:53:06 crc kubenswrapper[4792]: I1202 19:53:06.117001 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5vg6k"] Dec 02 19:53:07 crc kubenswrapper[4792]: I1202 19:53:07.975040 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-5vg6k" podUID="67b7043f-a615-4a79-9704-40f367e3c4ce" containerName="registry-server" containerID="cri-o://75db2b94378842d96b36283e4f4bb8479ca421b4ed6c8c461b8de78e98f857b2" gracePeriod=2 Dec 02 19:53:08 crc kubenswrapper[4792]: I1202 19:53:08.525472 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5vg6k" Dec 02 19:53:08 crc kubenswrapper[4792]: I1202 19:53:08.695968 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67b7043f-a615-4a79-9704-40f367e3c4ce-catalog-content\") pod \"67b7043f-a615-4a79-9704-40f367e3c4ce\" (UID: \"67b7043f-a615-4a79-9704-40f367e3c4ce\") " Dec 02 19:53:08 crc kubenswrapper[4792]: I1202 19:53:08.696343 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rgc9c\" (UniqueName: \"kubernetes.io/projected/67b7043f-a615-4a79-9704-40f367e3c4ce-kube-api-access-rgc9c\") pod \"67b7043f-a615-4a79-9704-40f367e3c4ce\" (UID: \"67b7043f-a615-4a79-9704-40f367e3c4ce\") " Dec 02 19:53:08 crc kubenswrapper[4792]: I1202 19:53:08.696464 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67b7043f-a615-4a79-9704-40f367e3c4ce-utilities\") pod \"67b7043f-a615-4a79-9704-40f367e3c4ce\" (UID: \"67b7043f-a615-4a79-9704-40f367e3c4ce\") " Dec 02 19:53:08 crc kubenswrapper[4792]: I1202 19:53:08.699751 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/67b7043f-a615-4a79-9704-40f367e3c4ce-utilities" (OuterVolumeSpecName: "utilities") pod "67b7043f-a615-4a79-9704-40f367e3c4ce" (UID: "67b7043f-a615-4a79-9704-40f367e3c4ce"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:53:08 crc kubenswrapper[4792]: I1202 19:53:08.738028 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/67b7043f-a615-4a79-9704-40f367e3c4ce-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "67b7043f-a615-4a79-9704-40f367e3c4ce" (UID: "67b7043f-a615-4a79-9704-40f367e3c4ce"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:53:08 crc kubenswrapper[4792]: I1202 19:53:08.799515 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67b7043f-a615-4a79-9704-40f367e3c4ce-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 19:53:08 crc kubenswrapper[4792]: I1202 19:53:08.799591 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67b7043f-a615-4a79-9704-40f367e3c4ce-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 19:53:09 crc kubenswrapper[4792]: I1202 19:53:09.023155 4792 generic.go:334] "Generic (PLEG): container finished" podID="67b7043f-a615-4a79-9704-40f367e3c4ce" containerID="75db2b94378842d96b36283e4f4bb8479ca421b4ed6c8c461b8de78e98f857b2" exitCode=0 Dec 02 19:53:09 crc kubenswrapper[4792]: I1202 19:53:09.023209 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5vg6k" event={"ID":"67b7043f-a615-4a79-9704-40f367e3c4ce","Type":"ContainerDied","Data":"75db2b94378842d96b36283e4f4bb8479ca421b4ed6c8c461b8de78e98f857b2"} Dec 02 19:53:09 crc kubenswrapper[4792]: I1202 19:53:09.023231 4792 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5vg6k" Dec 02 19:53:09 crc kubenswrapper[4792]: I1202 19:53:09.023244 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5vg6k" event={"ID":"67b7043f-a615-4a79-9704-40f367e3c4ce","Type":"ContainerDied","Data":"666e47d873974ef988206c8e48ca3522b6cb7dbd232d621467e560f8a5762254"} Dec 02 19:53:09 crc kubenswrapper[4792]: I1202 19:53:09.023269 4792 scope.go:117] "RemoveContainer" containerID="75db2b94378842d96b36283e4f4bb8479ca421b4ed6c8c461b8de78e98f857b2" Dec 02 19:53:09 crc kubenswrapper[4792]: I1202 19:53:09.044924 4792 scope.go:117] "RemoveContainer" containerID="5b5ec4e0ede1f920b6df71800277628522c6d97f9fbf2ac0cbc6874dd60086a1" Dec 02 19:53:09 crc kubenswrapper[4792]: I1202 19:53:09.452331 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67b7043f-a615-4a79-9704-40f367e3c4ce-kube-api-access-rgc9c" (OuterVolumeSpecName: "kube-api-access-rgc9c") pod "67b7043f-a615-4a79-9704-40f367e3c4ce" (UID: "67b7043f-a615-4a79-9704-40f367e3c4ce"). InnerVolumeSpecName "kube-api-access-rgc9c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:53:09 crc kubenswrapper[4792]: I1202 19:53:09.480241 4792 scope.go:117] "RemoveContainer" containerID="330dd0e48e811cd41fa43a850b3f0758a045075f7d719b0fcf0def9ab8a25a77" Dec 02 19:53:09 crc kubenswrapper[4792]: I1202 19:53:09.523888 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rgc9c\" (UniqueName: \"kubernetes.io/projected/67b7043f-a615-4a79-9704-40f367e3c4ce-kube-api-access-rgc9c\") on node \"crc\" DevicePath \"\"" Dec 02 19:53:09 crc kubenswrapper[4792]: I1202 19:53:09.613821 4792 scope.go:117] "RemoveContainer" containerID="75db2b94378842d96b36283e4f4bb8479ca421b4ed6c8c461b8de78e98f857b2" Dec 02 19:53:09 crc kubenswrapper[4792]: E1202 19:53:09.614231 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"75db2b94378842d96b36283e4f4bb8479ca421b4ed6c8c461b8de78e98f857b2\": container with ID starting with 75db2b94378842d96b36283e4f4bb8479ca421b4ed6c8c461b8de78e98f857b2 not found: ID does not exist" containerID="75db2b94378842d96b36283e4f4bb8479ca421b4ed6c8c461b8de78e98f857b2" Dec 02 19:53:09 crc kubenswrapper[4792]: I1202 19:53:09.614266 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75db2b94378842d96b36283e4f4bb8479ca421b4ed6c8c461b8de78e98f857b2"} err="failed to get container status \"75db2b94378842d96b36283e4f4bb8479ca421b4ed6c8c461b8de78e98f857b2\": rpc error: code = NotFound desc = could not find container \"75db2b94378842d96b36283e4f4bb8479ca421b4ed6c8c461b8de78e98f857b2\": container with ID starting with 75db2b94378842d96b36283e4f4bb8479ca421b4ed6c8c461b8de78e98f857b2 not found: ID does not exist" Dec 02 19:53:09 crc kubenswrapper[4792]: I1202 19:53:09.614293 4792 scope.go:117] "RemoveContainer" containerID="5b5ec4e0ede1f920b6df71800277628522c6d97f9fbf2ac0cbc6874dd60086a1" Dec 02 19:53:09 crc kubenswrapper[4792]: E1202 19:53:09.614591 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5b5ec4e0ede1f920b6df71800277628522c6d97f9fbf2ac0cbc6874dd60086a1\": container with ID starting with 5b5ec4e0ede1f920b6df71800277628522c6d97f9fbf2ac0cbc6874dd60086a1 not found: ID does not exist" 
containerID="5b5ec4e0ede1f920b6df71800277628522c6d97f9fbf2ac0cbc6874dd60086a1" Dec 02 19:53:09 crc kubenswrapper[4792]: I1202 19:53:09.614633 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5b5ec4e0ede1f920b6df71800277628522c6d97f9fbf2ac0cbc6874dd60086a1"} err="failed to get container status \"5b5ec4e0ede1f920b6df71800277628522c6d97f9fbf2ac0cbc6874dd60086a1\": rpc error: code = NotFound desc = could not find container \"5b5ec4e0ede1f920b6df71800277628522c6d97f9fbf2ac0cbc6874dd60086a1\": container with ID starting with 5b5ec4e0ede1f920b6df71800277628522c6d97f9fbf2ac0cbc6874dd60086a1 not found: ID does not exist" Dec 02 19:53:09 crc kubenswrapper[4792]: I1202 19:53:09.614661 4792 scope.go:117] "RemoveContainer" containerID="330dd0e48e811cd41fa43a850b3f0758a045075f7d719b0fcf0def9ab8a25a77" Dec 02 19:53:09 crc kubenswrapper[4792]: E1202 19:53:09.614935 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"330dd0e48e811cd41fa43a850b3f0758a045075f7d719b0fcf0def9ab8a25a77\": container with ID starting with 330dd0e48e811cd41fa43a850b3f0758a045075f7d719b0fcf0def9ab8a25a77 not found: ID does not exist" containerID="330dd0e48e811cd41fa43a850b3f0758a045075f7d719b0fcf0def9ab8a25a77" Dec 02 19:53:09 crc kubenswrapper[4792]: I1202 19:53:09.614960 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"330dd0e48e811cd41fa43a850b3f0758a045075f7d719b0fcf0def9ab8a25a77"} err="failed to get container status \"330dd0e48e811cd41fa43a850b3f0758a045075f7d719b0fcf0def9ab8a25a77\": rpc error: code = NotFound desc = could not find container \"330dd0e48e811cd41fa43a850b3f0758a045075f7d719b0fcf0def9ab8a25a77\": container with ID starting with 330dd0e48e811cd41fa43a850b3f0758a045075f7d719b0fcf0def9ab8a25a77 not found: ID does not exist" Dec 02 19:53:09 crc kubenswrapper[4792]: I1202 19:53:09.668007 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5vg6k"] Dec 02 19:53:09 crc kubenswrapper[4792]: I1202 19:53:09.676779 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-5vg6k"] Dec 02 19:53:11 crc kubenswrapper[4792]: I1202 19:53:11.561084 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="67b7043f-a615-4a79-9704-40f367e3c4ce" path="/var/lib/kubelet/pods/67b7043f-a615-4a79-9704-40f367e3c4ce/volumes" Dec 02 19:53:12 crc kubenswrapper[4792]: I1202 19:53:12.540736 4792 scope.go:117] "RemoveContainer" containerID="c44aa25eeed6cd154dd69fd2371d893756cf698611eb58116f99b003da1f4624" Dec 02 19:53:12 crc kubenswrapper[4792]: E1202 19:53:12.541601 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:53:24 crc kubenswrapper[4792]: I1202 19:53:24.539881 4792 scope.go:117] "RemoveContainer" containerID="c44aa25eeed6cd154dd69fd2371d893756cf698611eb58116f99b003da1f4624" Dec 02 19:53:24 crc kubenswrapper[4792]: E1202 19:53:24.540869 4792 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-wpdh4_openshift-machine-config-operator(f3a866a8-b9d9-4a3a-a721-9fe56db62c1f)\"" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" podUID="f3a866a8-b9d9-4a3a-a721-9fe56db62c1f" Dec 02 19:53:39 crc kubenswrapper[4792]: I1202 19:53:39.553392 4792 scope.go:117] "RemoveContainer" containerID="c44aa25eeed6cd154dd69fd2371d893756cf698611eb58116f99b003da1f4624" Dec 02 19:53:39 crc kubenswrapper[4792]: I1202 19:53:39.600425 4792 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","burstable","pod67b7043f-a615-4a79-9704-40f367e3c4ce"] err="unable to destroy cgroup paths for cgroup [kubepods burstable pod67b7043f-a615-4a79-9704-40f367e3c4ce] : Timed out while waiting for systemd to remove kubepods-burstable-pod67b7043f_a615_4a79_9704_40f367e3c4ce.slice" Dec 02 19:53:40 crc kubenswrapper[4792]: I1202 19:53:40.390106 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-wpdh4" event={"ID":"f3a866a8-b9d9-4a3a-a721-9fe56db62c1f","Type":"ContainerStarted","Data":"8791c4ce3e2f3c634f4e6361cccff436d3d64c5154dff6b2b1534b95873acfe0"} Dec 02 19:53:41 crc kubenswrapper[4792]: I1202 19:53:41.651547 4792 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-xqsx6"] Dec 02 19:53:41 crc kubenswrapper[4792]: E1202 19:53:41.652510 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67b7043f-a615-4a79-9704-40f367e3c4ce" containerName="extract-utilities" Dec 02 19:53:41 crc kubenswrapper[4792]: I1202 19:53:41.652543 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="67b7043f-a615-4a79-9704-40f367e3c4ce" containerName="extract-utilities" Dec 02 19:53:41 crc kubenswrapper[4792]: E1202 19:53:41.652587 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67b7043f-a615-4a79-9704-40f367e3c4ce" containerName="registry-server" Dec 02 19:53:41 crc kubenswrapper[4792]: I1202 19:53:41.652595 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="67b7043f-a615-4a79-9704-40f367e3c4ce" containerName="registry-server" Dec 02 19:53:41 crc kubenswrapper[4792]: E1202 19:53:41.652610 4792 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67b7043f-a615-4a79-9704-40f367e3c4ce" containerName="extract-content" Dec 02 19:53:41 crc kubenswrapper[4792]: I1202 19:53:41.652619 4792 state_mem.go:107] "Deleted CPUSet assignment" podUID="67b7043f-a615-4a79-9704-40f367e3c4ce" containerName="extract-content" Dec 02 19:53:41 crc kubenswrapper[4792]: I1202 19:53:41.652857 4792 memory_manager.go:354] "RemoveStaleState removing state" podUID="67b7043f-a615-4a79-9704-40f367e3c4ce" containerName="registry-server" Dec 02 19:53:41 crc kubenswrapper[4792]: I1202 19:53:41.654757 4792 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xqsx6" Dec 02 19:53:41 crc kubenswrapper[4792]: I1202 19:53:41.668861 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xqsx6"] Dec 02 19:53:41 crc kubenswrapper[4792]: I1202 19:53:41.774953 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53295bd3-76aa-4a1b-88ae-04bb991e7bf4-utilities\") pod \"community-operators-xqsx6\" (UID: \"53295bd3-76aa-4a1b-88ae-04bb991e7bf4\") " pod="openshift-marketplace/community-operators-xqsx6" Dec 02 19:53:41 crc kubenswrapper[4792]: I1202 19:53:41.775137 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbnxb\" (UniqueName: \"kubernetes.io/projected/53295bd3-76aa-4a1b-88ae-04bb991e7bf4-kube-api-access-rbnxb\") pod \"community-operators-xqsx6\" (UID: \"53295bd3-76aa-4a1b-88ae-04bb991e7bf4\") " pod="openshift-marketplace/community-operators-xqsx6" Dec 02 19:53:41 crc kubenswrapper[4792]: I1202 19:53:41.775360 4792 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53295bd3-76aa-4a1b-88ae-04bb991e7bf4-catalog-content\") pod \"community-operators-xqsx6\" (UID: \"53295bd3-76aa-4a1b-88ae-04bb991e7bf4\") " pod="openshift-marketplace/community-operators-xqsx6" Dec 02 19:53:41 crc kubenswrapper[4792]: I1202 19:53:41.877590 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53295bd3-76aa-4a1b-88ae-04bb991e7bf4-utilities\") pod \"community-operators-xqsx6\" (UID: \"53295bd3-76aa-4a1b-88ae-04bb991e7bf4\") " pod="openshift-marketplace/community-operators-xqsx6" Dec 02 19:53:41 crc kubenswrapper[4792]: I1202 19:53:41.877683 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbnxb\" (UniqueName: \"kubernetes.io/projected/53295bd3-76aa-4a1b-88ae-04bb991e7bf4-kube-api-access-rbnxb\") pod \"community-operators-xqsx6\" (UID: \"53295bd3-76aa-4a1b-88ae-04bb991e7bf4\") " pod="openshift-marketplace/community-operators-xqsx6" Dec 02 19:53:41 crc kubenswrapper[4792]: I1202 19:53:41.877749 4792 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53295bd3-76aa-4a1b-88ae-04bb991e7bf4-catalog-content\") pod \"community-operators-xqsx6\" (UID: \"53295bd3-76aa-4a1b-88ae-04bb991e7bf4\") " pod="openshift-marketplace/community-operators-xqsx6" Dec 02 19:53:41 crc kubenswrapper[4792]: I1202 19:53:41.878197 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53295bd3-76aa-4a1b-88ae-04bb991e7bf4-utilities\") pod \"community-operators-xqsx6\" (UID: \"53295bd3-76aa-4a1b-88ae-04bb991e7bf4\") " pod="openshift-marketplace/community-operators-xqsx6" Dec 02 19:53:41 crc kubenswrapper[4792]: I1202 19:53:41.878257 4792 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53295bd3-76aa-4a1b-88ae-04bb991e7bf4-catalog-content\") pod \"community-operators-xqsx6\" (UID: \"53295bd3-76aa-4a1b-88ae-04bb991e7bf4\") " pod="openshift-marketplace/community-operators-xqsx6" Dec 02 19:53:41 crc kubenswrapper[4792]: I1202 19:53:41.908984 4792 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-rbnxb\" (UniqueName: \"kubernetes.io/projected/53295bd3-76aa-4a1b-88ae-04bb991e7bf4-kube-api-access-rbnxb\") pod \"community-operators-xqsx6\" (UID: \"53295bd3-76aa-4a1b-88ae-04bb991e7bf4\") " pod="openshift-marketplace/community-operators-xqsx6" Dec 02 19:53:41 crc kubenswrapper[4792]: I1202 19:53:41.976161 4792 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xqsx6" Dec 02 19:53:42 crc kubenswrapper[4792]: I1202 19:53:42.626086 4792 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xqsx6"] Dec 02 19:53:43 crc kubenswrapper[4792]: I1202 19:53:43.420918 4792 generic.go:334] "Generic (PLEG): container finished" podID="53295bd3-76aa-4a1b-88ae-04bb991e7bf4" containerID="99bfbe3ea6d28bd8e79c0097d52d25ff4fb65efe353f69075423d95b03ed464b" exitCode=0 Dec 02 19:53:43 crc kubenswrapper[4792]: I1202 19:53:43.421039 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xqsx6" event={"ID":"53295bd3-76aa-4a1b-88ae-04bb991e7bf4","Type":"ContainerDied","Data":"99bfbe3ea6d28bd8e79c0097d52d25ff4fb65efe353f69075423d95b03ed464b"} Dec 02 19:53:43 crc kubenswrapper[4792]: I1202 19:53:43.421261 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xqsx6" event={"ID":"53295bd3-76aa-4a1b-88ae-04bb991e7bf4","Type":"ContainerStarted","Data":"d14f1f06c8807f4c58894598f01cc7f90d321e5e3dde6bc75c6891cc94f0ab08"} Dec 02 19:53:43 crc kubenswrapper[4792]: I1202 19:53:43.424279 4792 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 19:53:45 crc kubenswrapper[4792]: I1202 19:53:45.449618 4792 generic.go:334] "Generic (PLEG): container finished" podID="53295bd3-76aa-4a1b-88ae-04bb991e7bf4" containerID="51ac9a08562737e01858ae97338db97ec46582404f5ce51f399fbc1e502dbad9" exitCode=0 Dec 02 19:53:45 crc kubenswrapper[4792]: I1202 19:53:45.449880 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xqsx6" event={"ID":"53295bd3-76aa-4a1b-88ae-04bb991e7bf4","Type":"ContainerDied","Data":"51ac9a08562737e01858ae97338db97ec46582404f5ce51f399fbc1e502dbad9"} Dec 02 19:53:47 crc kubenswrapper[4792]: I1202 19:53:47.477584 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xqsx6" event={"ID":"53295bd3-76aa-4a1b-88ae-04bb991e7bf4","Type":"ContainerStarted","Data":"6d5f760939b4365362c31cba8513ec0026fa2dcac10abe8e36cbf810298b5408"} Dec 02 19:53:47 crc kubenswrapper[4792]: I1202 19:53:47.516812 4792 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-xqsx6" podStartSLOduration=4.050245069 podStartE2EDuration="6.516776245s" podCreationTimestamp="2025-12-02 19:53:41 +0000 UTC" firstStartedPulling="2025-12-02 19:53:43.423761183 +0000 UTC m=+4654.196653521" lastFinishedPulling="2025-12-02 19:53:45.890292369 +0000 UTC m=+4656.663184697" observedRunningTime="2025-12-02 19:53:47.496996895 +0000 UTC m=+4658.269889233" watchObservedRunningTime="2025-12-02 19:53:47.516776245 +0000 UTC m=+4658.289668613" Dec 02 19:53:51 crc kubenswrapper[4792]: I1202 19:53:51.977086 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-xqsx6" Dec 02 19:53:51 crc kubenswrapper[4792]: I1202 19:53:51.977772 4792 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-xqsx6" Dec 02 19:53:52 crc kubenswrapper[4792]: I1202 19:53:52.056104 4792 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-xqsx6" Dec 02 19:53:52 crc kubenswrapper[4792]: I1202 19:53:52.597008 4792 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-xqsx6" Dec 02 19:53:52 crc kubenswrapper[4792]: I1202 19:53:52.662814 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xqsx6"] Dec 02 19:53:54 crc kubenswrapper[4792]: I1202 19:53:54.561478 4792 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-xqsx6" podUID="53295bd3-76aa-4a1b-88ae-04bb991e7bf4" containerName="registry-server" containerID="cri-o://6d5f760939b4365362c31cba8513ec0026fa2dcac10abe8e36cbf810298b5408" gracePeriod=2 Dec 02 19:53:55 crc kubenswrapper[4792]: I1202 19:53:55.114573 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xqsx6" Dec 02 19:53:55 crc kubenswrapper[4792]: I1202 19:53:55.276562 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53295bd3-76aa-4a1b-88ae-04bb991e7bf4-utilities\") pod \"53295bd3-76aa-4a1b-88ae-04bb991e7bf4\" (UID: \"53295bd3-76aa-4a1b-88ae-04bb991e7bf4\") " Dec 02 19:53:55 crc kubenswrapper[4792]: I1202 19:53:55.276809 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53295bd3-76aa-4a1b-88ae-04bb991e7bf4-catalog-content\") pod \"53295bd3-76aa-4a1b-88ae-04bb991e7bf4\" (UID: \"53295bd3-76aa-4a1b-88ae-04bb991e7bf4\") " Dec 02 19:53:55 crc kubenswrapper[4792]: I1202 19:53:55.276843 4792 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rbnxb\" (UniqueName: \"kubernetes.io/projected/53295bd3-76aa-4a1b-88ae-04bb991e7bf4-kube-api-access-rbnxb\") pod \"53295bd3-76aa-4a1b-88ae-04bb991e7bf4\" (UID: \"53295bd3-76aa-4a1b-88ae-04bb991e7bf4\") " Dec 02 19:53:55 crc kubenswrapper[4792]: I1202 19:53:55.277954 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/53295bd3-76aa-4a1b-88ae-04bb991e7bf4-utilities" (OuterVolumeSpecName: "utilities") pod "53295bd3-76aa-4a1b-88ae-04bb991e7bf4" (UID: "53295bd3-76aa-4a1b-88ae-04bb991e7bf4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:53:55 crc kubenswrapper[4792]: I1202 19:53:55.278772 4792 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53295bd3-76aa-4a1b-88ae-04bb991e7bf4-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 19:53:55 crc kubenswrapper[4792]: I1202 19:53:55.291863 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53295bd3-76aa-4a1b-88ae-04bb991e7bf4-kube-api-access-rbnxb" (OuterVolumeSpecName: "kube-api-access-rbnxb") pod "53295bd3-76aa-4a1b-88ae-04bb991e7bf4" (UID: "53295bd3-76aa-4a1b-88ae-04bb991e7bf4"). InnerVolumeSpecName "kube-api-access-rbnxb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 19:53:55 crc kubenswrapper[4792]: I1202 19:53:55.353075 4792 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/53295bd3-76aa-4a1b-88ae-04bb991e7bf4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "53295bd3-76aa-4a1b-88ae-04bb991e7bf4" (UID: "53295bd3-76aa-4a1b-88ae-04bb991e7bf4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 19:53:55 crc kubenswrapper[4792]: I1202 19:53:55.381390 4792 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53295bd3-76aa-4a1b-88ae-04bb991e7bf4-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 19:53:55 crc kubenswrapper[4792]: I1202 19:53:55.381424 4792 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rbnxb\" (UniqueName: \"kubernetes.io/projected/53295bd3-76aa-4a1b-88ae-04bb991e7bf4-kube-api-access-rbnxb\") on node \"crc\" DevicePath \"\"" Dec 02 19:53:55 crc kubenswrapper[4792]: I1202 19:53:55.576418 4792 generic.go:334] "Generic (PLEG): container finished" podID="53295bd3-76aa-4a1b-88ae-04bb991e7bf4" containerID="6d5f760939b4365362c31cba8513ec0026fa2dcac10abe8e36cbf810298b5408" exitCode=0 Dec 02 19:53:55 crc kubenswrapper[4792]: I1202 19:53:55.576464 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xqsx6" event={"ID":"53295bd3-76aa-4a1b-88ae-04bb991e7bf4","Type":"ContainerDied","Data":"6d5f760939b4365362c31cba8513ec0026fa2dcac10abe8e36cbf810298b5408"} Dec 02 19:53:55 crc kubenswrapper[4792]: I1202 19:53:55.576492 4792 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xqsx6" event={"ID":"53295bd3-76aa-4a1b-88ae-04bb991e7bf4","Type":"ContainerDied","Data":"d14f1f06c8807f4c58894598f01cc7f90d321e5e3dde6bc75c6891cc94f0ab08"} Dec 02 19:53:55 crc kubenswrapper[4792]: I1202 19:53:55.576513 4792 scope.go:117] "RemoveContainer" containerID="6d5f760939b4365362c31cba8513ec0026fa2dcac10abe8e36cbf810298b5408" Dec 02 19:53:55 crc kubenswrapper[4792]: I1202 19:53:55.576789 4792 util.go:48] "No ready sandbox for pod can be found. 
Dec 02 19:53:55 crc kubenswrapper[4792]: I1202 19:53:55.576789 4792 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xqsx6"
Dec 02 19:53:55 crc kubenswrapper[4792]: I1202 19:53:55.614549 4792 scope.go:117] "RemoveContainer" containerID="51ac9a08562737e01858ae97338db97ec46582404f5ce51f399fbc1e502dbad9"
Dec 02 19:53:55 crc kubenswrapper[4792]: I1202 19:53:55.636845 4792 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xqsx6"]
Dec 02 19:53:55 crc kubenswrapper[4792]: I1202 19:53:55.652776 4792 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-xqsx6"]
Dec 02 19:53:55 crc kubenswrapper[4792]: I1202 19:53:55.657689 4792 scope.go:117] "RemoveContainer" containerID="99bfbe3ea6d28bd8e79c0097d52d25ff4fb65efe353f69075423d95b03ed464b"
Dec 02 19:53:55 crc kubenswrapper[4792]: I1202 19:53:55.708313 4792 scope.go:117] "RemoveContainer" containerID="6d5f760939b4365362c31cba8513ec0026fa2dcac10abe8e36cbf810298b5408"
Dec 02 19:53:55 crc kubenswrapper[4792]: E1202 19:53:55.708950 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d5f760939b4365362c31cba8513ec0026fa2dcac10abe8e36cbf810298b5408\": container with ID starting with 6d5f760939b4365362c31cba8513ec0026fa2dcac10abe8e36cbf810298b5408 not found: ID does not exist" containerID="6d5f760939b4365362c31cba8513ec0026fa2dcac10abe8e36cbf810298b5408"
Dec 02 19:53:55 crc kubenswrapper[4792]: I1202 19:53:55.708982 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d5f760939b4365362c31cba8513ec0026fa2dcac10abe8e36cbf810298b5408"} err="failed to get container status \"6d5f760939b4365362c31cba8513ec0026fa2dcac10abe8e36cbf810298b5408\": rpc error: code = NotFound desc = could not find container \"6d5f760939b4365362c31cba8513ec0026fa2dcac10abe8e36cbf810298b5408\": container with ID starting with 6d5f760939b4365362c31cba8513ec0026fa2dcac10abe8e36cbf810298b5408 not found: ID does not exist"
Dec 02 19:53:55 crc kubenswrapper[4792]: I1202 19:53:55.709004 4792 scope.go:117] "RemoveContainer" containerID="51ac9a08562737e01858ae97338db97ec46582404f5ce51f399fbc1e502dbad9"
Dec 02 19:53:55 crc kubenswrapper[4792]: E1202 19:53:55.712912 4792 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51ac9a08562737e01858ae97338db97ec46582404f5ce51f399fbc1e502dbad9\": container with ID starting with 51ac9a08562737e01858ae97338db97ec46582404f5ce51f399fbc1e502dbad9 not found: ID does not exist" containerID="51ac9a08562737e01858ae97338db97ec46582404f5ce51f399fbc1e502dbad9"
Dec 02 19:53:55 crc kubenswrapper[4792]: I1202 19:53:55.712966 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51ac9a08562737e01858ae97338db97ec46582404f5ce51f399fbc1e502dbad9"} err="failed to get container status \"51ac9a08562737e01858ae97338db97ec46582404f5ce51f399fbc1e502dbad9\": rpc error: code = NotFound desc = could not find container \"51ac9a08562737e01858ae97338db97ec46582404f5ce51f399fbc1e502dbad9\": container with ID starting with 51ac9a08562737e01858ae97338db97ec46582404f5ce51f399fbc1e502dbad9 not found: ID does not exist"
Dec 02 19:53:55 crc kubenswrapper[4792]: I1202 19:53:55.712997 4792 scope.go:117] "RemoveContainer" containerID="99bfbe3ea6d28bd8e79c0097d52d25ff4fb65efe353f69075423d95b03ed464b"
failed" err="rpc error: code = NotFound desc = could not find container \"99bfbe3ea6d28bd8e79c0097d52d25ff4fb65efe353f69075423d95b03ed464b\": container with ID starting with 99bfbe3ea6d28bd8e79c0097d52d25ff4fb65efe353f69075423d95b03ed464b not found: ID does not exist" containerID="99bfbe3ea6d28bd8e79c0097d52d25ff4fb65efe353f69075423d95b03ed464b" Dec 02 19:53:55 crc kubenswrapper[4792]: I1202 19:53:55.713387 4792 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99bfbe3ea6d28bd8e79c0097d52d25ff4fb65efe353f69075423d95b03ed464b"} err="failed to get container status \"99bfbe3ea6d28bd8e79c0097d52d25ff4fb65efe353f69075423d95b03ed464b\": rpc error: code = NotFound desc = could not find container \"99bfbe3ea6d28bd8e79c0097d52d25ff4fb65efe353f69075423d95b03ed464b\": container with ID starting with 99bfbe3ea6d28bd8e79c0097d52d25ff4fb65efe353f69075423d95b03ed464b not found: ID does not exist" Dec 02 19:53:57 crc kubenswrapper[4792]: I1202 19:53:57.565014 4792 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="53295bd3-76aa-4a1b-88ae-04bb991e7bf4" path="/var/lib/kubelet/pods/53295bd3-76aa-4a1b-88ae-04bb991e7bf4/volumes" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515113642050024442 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015113642051017360 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015113630322016501 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015113630323015452 5ustar corecore